/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
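/* Illustration (hypothetical target, not compiled): if STACK_GROWS_DOWNWARD
   is defined but ARGS_GROW_DOWNWARD is not, the two defined() tests above
   differ, so PUSH_ARGS_REVERSED is defined and function arguments are
   pushed last-to-first.  */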
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
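/* A minimal sketch (not compiled; hypothetical caller) of how the ratio
   test above selects the block-copy strategy.  */
#if 0
static void
copy_block_example (dst, src, size, align)
     rtx dst, src;
     unsigned HOST_WIDE_INT size;
     unsigned int align;
{
  if (MOVE_BY_PIECES_P (size, align))
    /* Cheap enough: expand inline as a few scalar moves.  */
    move_by_pieces (dst, src, size, align);
  else
    /* Otherwise fall back to a movstr pattern or a library call.  */
    emit_block_move (dst, src, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif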
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
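/* A minimal usage sketch (illustrative, not compiled): every rtx that
   might be a QUEUED is filtered immediately before being placed in an
   insn.  */
#if 0
  to = protect_from_queue (to, 1);	/* TO will be modified.  */
  from = protect_from_queue (from, 0);	/* FROM is only read.  */
  emit_insn (gen_move_insn (to, from));	/* Safe: no QUEUED remains.  */
#endif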
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif
  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      /* Fall back to a library call; select the libfunc from the
	 source/destination mode pair.  */

      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
	libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
	libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
	libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
	libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
	libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
	libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
	libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
	libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
	libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
	libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
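/* Illustrative use of convert_move (sketch, not compiled): widen a
   QImode pseudo into an SImode pseudo.  */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx word = gen_reg_rtx (SImode);
  convert_move (word, byte, 0);		/* UNSIGNEDP == 0: sign-extend.  */
  convert_move (word, byte, 1);		/* UNSIGNEDP == 1: zero-extend.  */
#endif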
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
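/* Sketch (not compiled; NARROW is a hypothetical QImode rtx):
   convert_to_mode and convert_modes are the value-producing
   counterparts of convert_move; the result may be X itself, a lowpart
   of it, or a fresh pseudo.  */
#if 0
  rtx wide = convert_to_mode (DImode, narrow, 1);	/* zero-extend */
  /* Constant folding: a QImode -1 interpreted unsigned becomes 0xff.  */
  rtx cst = convert_modes (SImode, QImode, GEN_INT (-1), 1);
#endif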
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
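/* Worked example: on a host with a 32-bit HOST_WIDE_INT and a target
   with MOVE_MAX_PIECES of 16, STORE_MAX_PIECES is MIN (16, 8) == 8,
   because an immediate constant wider than 2 * sizeof (HOST_WIDE_INT)
   bytes cannot be represented.  */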
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
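/* Worked example (hypothetical target): with MOVE_MAX == 4 and 32-bit
   alignment, an 11-byte block needs 2 SImode moves + 1 HImode move
   + 1 QImode move, so move_by_pieces_ninsns (11, 32) returns 4.  */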
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
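/* Usage sketch (not compiled): copy a 64-byte BLKmode object; DST_MEM
   and SRC_MEM are hypothetical BLKmode MEMs.  */
#if 0
  rtx ret = emit_block_move (dst_mem, src_mem, GEN_INT (64),
			     BLOCK_OP_NORMAL);
#endif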
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* Check to see whether memcpy takes all register arguments.  */
  {
    static enum {
      takes_regs_uninit, takes_regs_no, takes_regs_yes
    } takes_regs = takes_regs_uninit;

    switch (takes_regs)
      {
      case takes_regs_uninit:
	{
	  CUMULATIVE_ARGS args_so_far;
	  tree fn, arg;

	  fn = emit_block_move_libcall_fn (false);
	  INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	  for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	    {
	      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	      rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	      if (!tmp || !REG_P (tmp))
		goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					      NULL_TREE, 1))
		goto fail_takes_regs;
#endif
	      FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	    }
	}
	takes_regs = takes_regs_yes;
	/* FALLTHRU */

      case takes_regs_yes:
	return true;

      fail_takes_regs:
	takes_regs = takes_regs_no;
	/* FALLTHRU */
      case takes_regs_no:
	return false;

      default:
	abort ();
      }
  }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_move_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
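/* The RTL emitted above corresponds to this C skeleton (illustrative
   only):

	iter = 0;
	goto cmp;
     top:
	((char *) x)[iter] = ((char *) y)[iter];
	iter = iter + 1;
     cmp:
	if (iter < size)
	  goto top;
*/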
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
2325 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2326 registers represented by a PARALLEL. SSIZE represents the total size of
2327 block DST, or -1 if not known. */
2330 emit_group_store (orig_dst, src, ssize)
2337 if (GET_CODE (src) != PARALLEL)
2340 /* Check for a NULL entry, used to indicate that the parameter goes
2341 both on the stack and in registers. */
2342 if (XEXP (XVECEXP (src, 0, 0), 0))
2347 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2349 /* Copy the (probable) hard regs into pseudos. */
2350 for (i = start; i < XVECLEN (src, 0); i++)
2352 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2353 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2354 emit_move_insn (tmps[i], reg);
2358 /* If we won't be storing directly into memory, protect the real destination
2359 from strange tricks we might play. */
2361 if (GET_CODE (dst) == PARALLEL)
2365 /* We can get a PARALLEL dst if there is a conditional expression in
2366 a return statement. In that case, the dst and src are the same,
2367 so no action is necessary. */
2368 if (rtx_equal_p (dst, src))
2371 /* It is unclear if we can ever reach here, but we may as well handle
2372 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2375 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2376 emit_group_store (temp, src, ssize);
2377 emit_group_load (dst, temp, ssize);
2380 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2382 dst = gen_reg_rtx (GET_MODE (orig_dst));
2383 /* Make life a bit easier for combine. */
2384 emit_move_insn (dst, const0_rtx);
2387 /* Process the pieces. */
2388 for (i = start; i < XVECLEN (src, 0); i++)
2390 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2391 enum machine_mode mode = GET_MODE (tmps[i]);
2392 unsigned int bytelen = GET_MODE_SIZE (mode);
2395 /* Handle trailing fragments that run over the size of the struct. */
2396 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2398 if (BYTES_BIG_ENDIAN)
2400 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2401 /* As in emit_group_load, keep the result of expand_binop, which
2401 need not use the suggested target. */
2401 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2402 tmps[i], 0, OPTAB_WIDEN);
2404 bytelen = ssize - bytepos;
2407 if (GET_CODE (dst) == CONCAT)
2409 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2410 dest = XEXP (dst, 0);
2411 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2413 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2414 dest = XEXP (dst, 1);
2420 /* Optimize the access just a bit. */
2421 if (GET_CODE (dest) == MEM
2422 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2423 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2424 && bytelen == GET_MODE_SIZE (mode))
2425 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2427 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2428 mode, tmps[i], ssize);
2433 /* Copy from the pseudo into the (probable) hard reg. */
2434 if (GET_CODE (dst) == REG)
2435 emit_move_insn (orig_dst, dst);
2438 /* Generate code to copy a BLKmode object of TYPE out of a
2439 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2440 is null, a stack temporary is created. TGTBLK is returned.
2442 The primary purpose of this routine is to handle functions
2443 that return BLKmode structures in registers. Some machines
2444 (the PA for example) want to return all small structures
2445 in registers regardless of the structure's alignment. */
2448 copy_blkmode_from_reg (tgtblk, srcreg, type)
2453 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2454 rtx src = NULL, dst = NULL;
2455 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2456 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2460 tgtblk = assign_temp (build_qualified_type (type,
2462 | TYPE_QUAL_CONST)),
2464 preserve_temp_slots (tgtblk);
2467 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2468 into a new pseudo which is a full word.
2470 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2471 the wrong part of the register gets copied so we fake a type conversion in place. */
2473 if (GET_MODE (srcreg) != BLKmode
2474 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2476 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2477 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2479 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2482 /* Structures whose size is not a multiple of a word are aligned
2483 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2484 machine, this means we must skip the empty high order bytes when
2485 calculating the bit offset. */
2486 if (BYTES_BIG_ENDIAN
2487 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2488 && bytes % UNITS_PER_WORD)
2489 big_endian_correction
2490 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
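/* For instance (illustrative), with bytes == 6 on a 32-bit big-endian
   target: bytes % UNITS_PER_WORD == 2, so big_endian_correction
   == 32 - 2 * 8 == 16, and extraction starts 16 bits into the word,
   skipping the empty high-order bytes.  */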
2492 /* Copy the structure BITSIZE bits at a time.
2494 We could probably emit more efficient code for machines which do not use
2495 strict alignment, but it doesn't seem worth the effort at the current time. */
2497 for (bitpos = 0, xbitpos = big_endian_correction;
2498 bitpos < bytes * BITS_PER_UNIT;
2499 bitpos += bitsize, xbitpos += bitsize)
2501 /* We need a new source operand each time xbitpos is on a
2502 word boundary and when xbitpos == big_endian_correction
2503 (the first time through). */
2504 if (xbitpos % BITS_PER_WORD == 0
2505 || xbitpos == big_endian_correction)
2506 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2509 /* We need a new destination operand each time bitpos is on a word boundary. */
2511 if (bitpos % BITS_PER_WORD == 0)
2512 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2514 /* Use xbitpos for the source extraction (right justified) and
2515 bitpos for the destination store (left justified). */
2516 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2517 extract_bit_field (src, bitsize,
2518 xbitpos % BITS_PER_WORD, 1,
2519 NULL_RTX, word_mode, word_mode,
2527 /* Add a USE expression for REG to the (possibly empty) list pointed
2528 to by CALL_FUSAGE. REG must denote a hard register. */
2531 use_reg (call_fusage, reg)
2532 rtx *call_fusage, reg;
2534 if (GET_CODE (reg) != REG
2535 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2539 = gen_rtx_EXPR_LIST (VOIDmode,
2540 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2543 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2544 starting at REGNO. All of these registers must be hard registers. */
2547 use_regs (call_fusage, regno, nregs)
2554 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2557 for (i = 0; i < nregs; i++)
2558 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2561 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2562 PARALLEL REGS. This is for calls that pass values in multiple
2563 non-contiguous locations. The Irix 6 ABI has examples of this. */
2566 use_group_regs (call_fusage, regs)
2572 for (i = 0; i < XVECLEN (regs, 0); i++)
2574 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2576 /* A NULL entry means the parameter goes both on the stack and in
2577 registers. This can also be a MEM for targets that pass values
2578 partially on the stack and partially in registers. */
2579 if (reg != 0 && GET_CODE (reg) == REG)
2580 use_reg (call_fusage, reg);
2585 /* Determine whether the LEN bytes generated by CONSTFUN can be
2586 stored to memory using several move instructions. CONSTFUNDATA is
2587 a pointer which will be passed as argument in every CONSTFUN call.
2588 ALIGN is the maximum alignment we can assume. Return nonzero if a
2589 call to store_by_pieces should succeed. */
2592 can_store_by_pieces (len, constfun, constfundata, align)
2593 unsigned HOST_WIDE_INT len;
2594 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2598 unsigned HOST_WIDE_INT max_size, l;
2599 HOST_WIDE_INT offset = 0;
2600 enum machine_mode mode, tmode;
2601 enum insn_code icode;
2605 if (! MOVE_BY_PIECES_P (len, align))
2608 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2609 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2610 align = MOVE_MAX * BITS_PER_UNIT;
2612 /* We would first store what we can in the largest integer mode, then go to
2613 successively smaller modes. */
2616 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2621 max_size = STORE_MAX_PIECES + 1;
2622 while (max_size > 1)
2624 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2625 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2626 if (GET_MODE_SIZE (tmode) < max_size)
2629 if (mode == VOIDmode)
2632 icode = mov_optab->handlers[(int) mode].insn_code;
2633 if (icode != CODE_FOR_nothing
2634 && align >= GET_MODE_ALIGNMENT (mode))
2636 unsigned int size = GET_MODE_SIZE (mode);
2643 cst = (*constfun) (constfundata, offset, mode);
2644 if (!LEGITIMATE_CONSTANT_P (cst))
2654 max_size = GET_MODE_SIZE (mode);
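/* E.g. (illustrative): with len == 7 and sufficient alignment on a
   target whose widest cheap integer move is 4 bytes, the loop checks
   one SImode store, one HImode store and one QImode store, requiring
   a legitimate constant from CONSTFUN at offsets 0, 4 and 6.  */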
2657 /* The code above should have handled everything. */
2665 /* Generate several move instructions to store LEN bytes generated by
2666 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2667 pointer which will be passed as argument in every CONSTFUN call.
2668 ALIGN is the maximum alignment we can assume. */
2671 store_by_pieces (to, len, constfun, constfundata, align)
2673 unsigned HOST_WIDE_INT len;
2674 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2678 struct store_by_pieces data;
2680 if (! MOVE_BY_PIECES_P (len, align))
2682 to = protect_from_queue (to, 1);
2683 data.constfun = constfun;
2684 data.constfundata = constfundata;
2687 store_by_pieces_1 (&data, align);
2690 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2691 rtx with BLKmode). The caller must pass TO through protect_from_queue
2692 before calling. ALIGN is the maximum alignment we can assume. */
2695 clear_by_pieces (to, len, align)
2697 unsigned HOST_WIDE_INT len;
2700 struct store_by_pieces data;
2702 data.constfun = clear_by_pieces_1;
2703 data.constfundata = NULL;
2706 store_by_pieces_1 (&data, align);
2709 /* Callback routine for clear_by_pieces.
2710 Return const0_rtx unconditionally. */
2713 clear_by_pieces_1 (data, offset, mode)
2714 PTR data ATTRIBUTE_UNUSED;
2715 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2716 enum machine_mode mode ATTRIBUTE_UNUSED;
2721 /* Subroutine of clear_by_pieces and store_by_pieces.
2722 Generate several move instructions to store LEN bytes of block TO (a MEM
2723 rtx with BLKmode). The caller must pass TO through protect_from_queue
2724 before calling. ALIGN is the maximum alignment we can assume. */
2727 store_by_pieces_1 (data, align)
2728 struct store_by_pieces *data;
2731 rtx to_addr = XEXP (data->to, 0);
2732 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2733 enum machine_mode mode = VOIDmode, tmode;
2734 enum insn_code icode;
2737 data->to_addr = to_addr;
2739 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2740 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2742 data->explicit_inc_to = 0;
2744 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2746 data->offset = data->len;
2748 /* If storing requires more than two move insns,
2749 copy addresses to registers (to make displacements shorter)
2750 and use post-increment if available. */
2751 if (!data->autinc_to
2752 && move_by_pieces_ninsns (data->len, align) > 2)
2754 /* Determine the main mode we'll be using. */
2755 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2756 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2757 if (GET_MODE_SIZE (tmode) < max_size)
2760 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2762 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2763 data->autinc_to = 1;
2764 data->explicit_inc_to = -1;
2767 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2768 && ! data->autinc_to)
2770 data->to_addr = copy_addr_to_reg (to_addr);
2771 data->autinc_to = 1;
2772 data->explicit_inc_to = 1;
2775 if (!data->autinc_to && CONSTANT_P (to_addr))
2776 data->to_addr = copy_addr_to_reg (to_addr);
2779 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2780 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2781 align = MOVE_MAX * BITS_PER_UNIT;
2783 /* First store what we can in the largest integer mode, then go to
2784 successively smaller modes. */
2786 while (max_size > 1)
2788 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2789 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2790 if (GET_MODE_SIZE (tmode) < max_size)
2793 if (mode == VOIDmode)
2796 icode = mov_optab->handlers[(int) mode].insn_code;
2797 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2798 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2800 max_size = GET_MODE_SIZE (mode);
2803 /* The code above should have handled everything. */
2808 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2809 with move instructions for mode MODE. GENFUN is the gen_... function
2810 to make a move insn for that mode. DATA has all the other info. */
2813 store_by_pieces_2 (genfun, mode, data)
2814 rtx (*genfun) PARAMS ((rtx, ...));
2815 enum machine_mode mode;
2816 struct store_by_pieces *data;
2818 unsigned int size = GET_MODE_SIZE (mode);
2821 while (data->len >= size)
2824 data->offset -= size;
2826 if (data->autinc_to)
2827 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2830 to1 = adjust_address (data->to, mode, data->offset);
2832 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2833 emit_insn (gen_add2_insn (data->to_addr,
2834 GEN_INT (-(HOST_WIDE_INT) size)));
2836 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2837 emit_insn ((*genfun) (to1, cst));
2839 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2840 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
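/* Schematically (illustrative RTL), a 4-byte piece stored with
   explicit_inc_to == 1 becomes

       (set (mem:SI (reg:SI A)) (const_int ...))
       (set (reg:SI A) (plus:SI (reg:SI A) (const_int 4)))

   i.e. the address register is bumped by an explicit add after each
   store.  */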
2842 if (! data->reverse)
2843 data->offset += size;
2849 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2850 its length in bytes. */
2853 clear_storage (object, size)
2858 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2859 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2861 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2862 just move a zero. Otherwise, do this a piece at a time. */
2863 if (GET_MODE (object) != BLKmode
2864 && GET_CODE (size) == CONST_INT
2865 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2866 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2869 object = protect_from_queue (object, 1);
2870 size = protect_from_queue (size, 0);
2872 if (GET_CODE (size) == CONST_INT
2873 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2874 clear_by_pieces (object, INTVAL (size), align);
2875 else if (clear_storage_via_clrstr (object, size, align))
2878 retval = clear_storage_via_libcall (object, size);
2884 /* A subroutine of clear_storage. Expand a clrstr pattern;
2885 return true if successful. */
2888 clear_storage_via_clrstr (object, size, align)
2892 /* Try the most limited insn first, because there's no point
2893 including more than one in the machine description unless
2894 the more limited one has some advantage. */
2896 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2897 enum machine_mode mode;
2899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2900 mode = GET_MODE_WIDER_MODE (mode))
2902 enum insn_code code = clrstr_optab[(int) mode];
2903 insn_operand_predicate_fn pred;
2905 if (code != CODE_FOR_nothing
2906 /* We don't need MODE to be narrower than
2907 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2908 the mode mask, as it is returned by the macro, it will
2909 definitely be less than the actual mode mask. */
2910 && ((GET_CODE (size) == CONST_INT
2911 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2912 <= (GET_MODE_MASK (mode) >> 1)))
2913 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2914 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2915 || (*pred) (object, BLKmode))
2916 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2917 || (*pred) (opalign, VOIDmode)))
2920 rtx last = get_last_insn ();
2923 op1 = convert_to_mode (mode, size, 1);
2924 pred = insn_data[(int) code].operand[1].predicate;
2925 if (pred != 0 && ! (*pred) (op1, mode))
2926 op1 = copy_to_mode_reg (mode, op1);
2928 pat = GEN_FCN ((int) code) (object, op1, opalign);
2935 delete_insns_since (last);
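/* E.g. a target supplying a "clrstrsi" pattern gets first crack at the
   block clear here; if its expander FAILs, delete_insns_since discards
   whatever it emitted and the caller falls back to the libcall.  */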
2942 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2943 Return the return value of memset, 0 otherwise. */
2946 clear_storage_via_libcall (object, size)
2949 tree call_expr, arg_list, fn, object_tree, size_tree;
2950 enum machine_mode size_mode;
2953 /* OBJECT or SIZE may have been passed through protect_from_queue.
2955 It is unsafe to save the value generated by protect_from_queue
2956 and reuse it later. Consider what happens if emit_queue is
2957 called before the return value from protect_from_queue is used.
2959 Expansion of the CALL_EXPR below will call emit_queue before
2960 we are finished emitting RTL for argument setup. So if we are
2961 not careful we could get the wrong value for an argument.
2963 To avoid this problem we go ahead and emit code to copy OBJECT
2964 and SIZE into new pseudos. We can then place those new pseudos
2965 into an RTL_EXPR and use them later, even after a call to emit_queue.
2968 Note this is not strictly needed for library calls since they
2969 do not call emit_queue before loading their arguments. However,
2970 we may need to have library calls call emit_queue in the future
2971 since failing to do so could cause problems for targets which
2972 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2974 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2976 if (TARGET_MEM_FUNCTIONS)
2977 size_mode = TYPE_MODE (sizetype);
2979 size_mode = TYPE_MODE (unsigned_type_node);
2980 size = convert_to_mode (size_mode, size, 1);
2981 size = copy_to_mode_reg (size_mode, size);
2983 /* It is incorrect to use the libcall calling conventions to call
2984 memset in this context. This could be a user call to memset and
2985 the user may wish to examine the return value from memset. For
2986 targets where libcalls and normal calls have different conventions
2987 for returning pointers, we could end up generating incorrect code.
2989 For convenience, we generate the call to bzero this way as well. */
2991 object_tree = make_tree (ptr_type_node, object);
2992 if (TARGET_MEM_FUNCTIONS)
2993 size_tree = make_tree (sizetype, size);
2995 size_tree = make_tree (unsigned_type_node, size);
2997 fn = clear_storage_libcall_fn (true);
2998 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2999 if (TARGET_MEM_FUNCTIONS)
3000 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3001 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
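/* The list built above corresponds to memset (OBJECT, 0, SIZE) when
   TARGET_MEM_FUNCTIONS, and to bzero (OBJECT, SIZE) otherwise.  */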
3003 /* Now we have to build up the CALL_EXPR itself. */
3004 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3005 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3006 call_expr, arg_list, NULL_TREE);
3007 TREE_SIDE_EFFECTS (call_expr) = 1;
3009 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3011 /* If we are initializing a readonly value, show the above call
3012 clobbered it. Otherwise, a load from it may erroneously be
3013 hoisted from a loop. */
3014 if (RTX_UNCHANGING_P (object))
3015 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3017 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3020 /* A subroutine of clear_storage_via_libcall. Create the tree node
3021 for the function we use for block clears. The first time FOR_CALL
3022 is true, we call assemble_external. */
3024 static GTY(()) tree block_clear_fn;
3027 clear_storage_libcall_fn (for_call)
3030 static bool emitted_extern;
3031 tree fn = block_clear_fn, args;
3035 if (TARGET_MEM_FUNCTIONS)
3037 fn = get_identifier ("memset");
3038 args = build_function_type_list (ptr_type_node, ptr_type_node,
3039 integer_type_node, sizetype,
3044 fn = get_identifier ("bzero");
3045 args = build_function_type_list (void_type_node, ptr_type_node,
3046 unsigned_type_node, NULL_TREE);
3049 fn = build_decl (FUNCTION_DECL, fn, args);
3050 DECL_EXTERNAL (fn) = 1;
3051 TREE_PUBLIC (fn) = 1;
3052 DECL_ARTIFICIAL (fn) = 1;
3053 TREE_NOTHROW (fn) = 1;
3055 block_clear_fn = fn;
3058 if (for_call && !emitted_extern)
3060 emitted_extern = true;
3061 make_decl_rtl (fn, NULL);
3062 assemble_external (fn);
3068 /* Generate code to copy Y into X.
3069 Both Y and X must have the same mode, except that
3070 Y can be a constant with VOIDmode.
3071 This mode cannot be BLKmode; use emit_block_move for that.
3073 Return the last instruction emitted. */
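/* A typical use (illustrative):

       rtx tmp = gen_reg_rtx (SImode);
       rtx insn = emit_move_insn (tmp, GEN_INT (42));

   emits a single (set (reg:SI N) (const_int 42)) and returns it.  */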
3076 emit_move_insn (x, y)
3079 enum machine_mode mode = GET_MODE (x);
3080 rtx y_cst = NULL_RTX;
3083 x = protect_from_queue (x, 1);
3084 y = protect_from_queue (y, 0);
3086 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3089 /* Never force constant_p_rtx to memory. */
3090 if (GET_CODE (y) == CONSTANT_P_RTX)
3092 else if (CONSTANT_P (y))
3095 && FLOAT_MODE_P (GET_MODE (x))
3096 && (last_insn = compress_float_constant (x, y)))
3099 if (!LEGITIMATE_CONSTANT_P (y))
3102 y = force_const_mem (mode, y);
3106 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3108 if (GET_CODE (x) == MEM
3109 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3110 && ! push_operand (x, GET_MODE (x)))
3112 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3113 x = validize_mem (x);
3115 if (GET_CODE (y) == MEM
3116 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3118 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3119 y = validize_mem (y);
3121 if (mode == BLKmode)
3124 last_insn = emit_move_insn_1 (x, y);
3126 if (y_cst && GET_CODE (x) == REG)
3127 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3132 /* Low level part of emit_move_insn.
3133 Called just like emit_move_insn, but assumes X and Y
3134 are basically valid. */
3137 emit_move_insn_1 (x, y)
3140 enum machine_mode mode = GET_MODE (x);
3141 enum machine_mode submode;
3142 enum mode_class class = GET_MODE_CLASS (mode);
3144 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3147 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3149 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3151 /* Expand complex moves by moving real part and imag part, if possible. */
3152 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3153 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
3155 (class == MODE_COMPLEX_INT
3156 ? MODE_INT : MODE_FLOAT),
3158 && (mov_optab->handlers[(int) submode].insn_code
3159 != CODE_FOR_nothing))
3161 /* Don't split destination if it is a stack push. */
3162 int stack = push_operand (x, GET_MODE (x));
3164 #ifdef PUSH_ROUNDING
3165 /* In case we output to the stack, but the size is smaller than the
3166 machine can push exactly, we need to use move instructions. */
3168 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3169 != GET_MODE_SIZE (submode)))
3172 HOST_WIDE_INT offset1, offset2;
3174 /* Do not use anti_adjust_stack, since we don't want to update
3175 stack_pointer_delta. */
3176 temp = expand_binop (Pmode,
3177 #ifdef STACK_GROWS_DOWNWARD
3185 (GET_MODE_SIZE (GET_MODE (x)))),
3186 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3188 if (temp != stack_pointer_rtx)
3189 emit_move_insn (stack_pointer_rtx, temp);
3191 #ifdef STACK_GROWS_DOWNWARD
3193 offset2 = GET_MODE_SIZE (submode);
3195 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3196 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3197 + GET_MODE_SIZE (submode));
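/* As a concrete (illustrative) case: pushing a complex double (16
   bytes, DFmode parts, PUSH_ROUNDING (16) assumed to be 16) gives
   offset1 == -16 and offset2 == -8, so after the manual stack
   adjustment above the real part lands at sp - 16 and the imaginary
   part at sp - 8.  */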
3200 emit_move_insn (change_address (x, submode,
3201 gen_rtx_PLUS (Pmode,
3203 GEN_INT (offset1))),
3204 gen_realpart (submode, y));
3205 emit_move_insn (change_address (x, submode,
3206 gen_rtx_PLUS (Pmode,
3208 GEN_INT (offset2))),
3209 gen_imagpart (submode, y));
3213 /* If this is a stack push, push the highpart first, so it
3214 will be in the argument order.
3216 In that case, change_address is used only to convert
3217 the mode, not to change the address. */
3220 /* Note that the real part always precedes the imag part in memory
3221 regardless of machine's endianness. */
3222 #ifdef STACK_GROWS_DOWNWARD
3223 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3224 (gen_rtx_MEM (submode, XEXP (x, 0)),
3225 gen_imagpart (submode, y)));
3226 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3227 (gen_rtx_MEM (submode, XEXP (x, 0)),
3228 gen_realpart (submode, y)));
3230 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3231 (gen_rtx_MEM (submode, XEXP (x, 0)),
3232 gen_realpart (submode, y)));
3233 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3234 (gen_rtx_MEM (submode, XEXP (x, 0)),
3235 gen_imagpart (submode, y)));
3240 rtx realpart_x, realpart_y;
3241 rtx imagpart_x, imagpart_y;
3243 /* If this is a complex value with each part being smaller than a
3244 word, the usual calling sequence will likely pack the pieces into
3245 a single register. Unfortunately, SUBREG of hard registers only
3246 deals in terms of words, so we have a problem converting input
3247 arguments to the CONCAT of two registers that is used elsewhere
3248 for complex values. If this is before reload, we can copy it into
3249 memory and reload. FIXME, we should see about using extract and
3250 insert on integer registers, but complex short and complex char
3251 variables should be rarely used. */
3252 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3253 && (reload_in_progress | reload_completed) == 0)
3256 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3258 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3260 if (packed_dest_p || packed_src_p)
3262 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3263 ? MODE_FLOAT : MODE_INT);
3265 enum machine_mode reg_mode
3266 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3268 if (reg_mode != BLKmode)
3270 rtx mem = assign_stack_temp (reg_mode,
3271 GET_MODE_SIZE (mode), 0);
3272 rtx cmem = adjust_address (mem, mode, 0);
3275 = N_("function using short complex types cannot be inline");
3279 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3281 emit_move_insn_1 (cmem, y);
3282 return emit_move_insn_1 (sreg, mem);
3286 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3288 emit_move_insn_1 (mem, sreg);
3289 return emit_move_insn_1 (x, cmem);
3295 realpart_x = gen_realpart (submode, x);
3296 realpart_y = gen_realpart (submode, y);
3297 imagpart_x = gen_imagpart (submode, x);
3298 imagpart_y = gen_imagpart (submode, y);
3300 /* Show the output dies here. This is necessary for SUBREGs
3301 of pseudos since we cannot track their lifetimes correctly;
3302 hard regs shouldn't appear here except as return values.
3303 We never want to emit such a clobber after reload. */
3305 && ! (reload_in_progress || reload_completed)
3306 && (GET_CODE (realpart_x) == SUBREG
3307 || GET_CODE (imagpart_x) == SUBREG))
3308 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3310 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3311 (realpart_x, realpart_y));
3312 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3313 (imagpart_x, imagpart_y));
3316 return get_last_insn ();
3319 /* This will handle any multi-word or full-word mode that lacks a move_insn
3320 pattern. However, you will get better code if you define such patterns,
3321 even if they must turn into multiple assembler instructions. */
3322 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3329 #ifdef PUSH_ROUNDING
3331 /* If X is a push on the stack, do the push now and replace
3332 X with a reference to the stack pointer. */
3333 if (push_operand (x, GET_MODE (x)))
3338 /* Do not use anti_adjust_stack, since we don't want to update
3339 stack_pointer_delta. */
3340 temp = expand_binop (Pmode,
3341 #ifdef STACK_GROWS_DOWNWARD
3349 (GET_MODE_SIZE (GET_MODE (x)))),
3350 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3352 if (temp != stack_pointer_rtx)
3353 emit_move_insn (stack_pointer_rtx, temp);
3355 code = GET_CODE (XEXP (x, 0));
3357 /* Just hope that small offsets off SP are OK. */
3358 if (code == POST_INC)
3359 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3360 GEN_INT (-((HOST_WIDE_INT)
3361 GET_MODE_SIZE (GET_MODE (x)))));
3362 else if (code == POST_DEC)
3363 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3364 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3366 temp = stack_pointer_rtx;
3368 x = change_address (x, VOIDmode, temp);
3372 /* If we are in reload, see if either operand is a MEM whose address
3373 is scheduled for replacement. */
3374 if (reload_in_progress && GET_CODE (x) == MEM
3375 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3376 x = replace_equiv_address_nv (x, inner);
3377 if (reload_in_progress && GET_CODE (y) == MEM
3378 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3379 y = replace_equiv_address_nv (y, inner);
3385 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3388 rtx xpart = operand_subword (x, i, 1, mode);
3389 rtx ypart = operand_subword (y, i, 1, mode);
3391 /* If we can't get a part of Y, put Y into memory if it is a
3392 constant. Otherwise, force it into a register. If we still
3393 can't get a part of Y, abort. */
3394 if (ypart == 0 && CONSTANT_P (y))
3396 y = force_const_mem (mode, y);
3397 ypart = operand_subword (y, i, 1, mode);
3399 else if (ypart == 0)
3400 ypart = operand_subword_force (y, i, mode);
3402 if (xpart == 0 || ypart == 0)
3405 need_clobber |= (GET_CODE (xpart) == SUBREG);
3407 last_insn = emit_move_insn (xpart, ypart);
3413 /* Show the output dies here. This is necessary for SUBREGs
3414 of pseudos since we cannot track their lifetimes correctly;
3415 hard regs shouldn't appear here except as return values.
3416 We never want to emit such a clobber after reload. */
3418 && ! (reload_in_progress || reload_completed)
3419 && need_clobber != 0)
3420 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3430 /* If Y is representable exactly in a narrower mode, and the target can
3431 perform the extension directly from constant or memory, then emit the
3432 move as an extension. */
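/* Example (illustrative): the DFmode constant 1.0 truncates exactly
   to SFmode, so on a suitable target the move can be emitted as a
   float_extend of the SFmode constant; 2**-1074, by contrast, fits
   no narrower mode exactly and is skipped by the truncation test
   below.  */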
3435 compress_float_constant (x, y)
3438 enum machine_mode dstmode = GET_MODE (x);
3439 enum machine_mode orig_srcmode = GET_MODE (y);
3440 enum machine_mode srcmode;
3443 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3445 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3446 srcmode != orig_srcmode;
3447 srcmode = GET_MODE_WIDER_MODE (srcmode))
3450 rtx trunc_y, last_insn;
3452 /* Skip if the target can't extend this way. */
3453 ic = can_extend_p (dstmode, srcmode, 0);
3454 if (ic == CODE_FOR_nothing)
3457 /* Skip if the narrowed value isn't exact. */
3458 if (! exact_real_truncate (srcmode, &r))
3461 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3463 if (LEGITIMATE_CONSTANT_P (trunc_y))
3465 /* Skip if the target needs extra instructions to perform the extension. */
3467 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3470 else if (float_extend_from_mem[dstmode][srcmode])
3471 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3475 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3476 last_insn = get_last_insn ();
3478 if (GET_CODE (x) == REG)
3479 REG_NOTES (last_insn)
3480 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3488 /* Pushing data onto the stack. */
3490 /* Push a block of length SIZE (perhaps variable)
3491 and return an rtx to address the beginning of the block.
3492 Note that it is not possible for the value returned to be a QUEUED.
3493 The value may be virtual_outgoing_args_rtx.
3495 EXTRA is the number of bytes of padding to push in addition to SIZE.
3496 BELOW nonzero means this padding comes at low addresses;
3497 otherwise, the padding comes at high addresses. */
3500 push_block (size, extra, below)
3506 size = convert_modes (Pmode, ptr_mode, size, 1);
3507 if (CONSTANT_P (size))
3508 anti_adjust_stack (plus_constant (size, extra));
3509 else if (GET_CODE (size) == REG && extra == 0)
3510 anti_adjust_stack (size);
3513 temp = copy_to_mode_reg (Pmode, size);
3515 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3516 temp, 0, OPTAB_LIB_WIDEN);
3517 anti_adjust_stack (temp);
3520 #ifndef STACK_GROWS_DOWNWARD
3526 temp = virtual_outgoing_args_rtx;
3527 if (extra != 0 && below)
3528 temp = plus_constant (temp, extra);
3532 if (GET_CODE (size) == CONST_INT)
3533 temp = plus_constant (virtual_outgoing_args_rtx,
3534 -INTVAL (size) - (below ? 0 : extra));
3535 else if (extra != 0 && !below)
3536 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3537 negate_rtx (Pmode, plus_constant (size, extra)));
3539 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3540 negate_rtx (Pmode, size));
3543 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3546 #ifdef PUSH_ROUNDING
3548 /* Emit single push insn. */
3551 emit_single_push_insn (mode, x, type)
3553 enum machine_mode mode;
3557 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
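/* For example (a typical definition, not universal), a target that
   keeps the stack 2-byte aligned might define

       #define PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)

   so pushing a QImode value still adjusts the stack pointer by 2,
   making rounded_size differ from GET_MODE_SIZE (mode).  */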
3559 enum insn_code icode;
3560 insn_operand_predicate_fn pred;
3562 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3563 /* If there is a push pattern, use it. Otherwise fall back on the old
3564 way of handing a MEM representing the push operation to the move expander. */
3565 icode = push_optab->handlers[(int) mode].insn_code;
3566 if (icode != CODE_FOR_nothing)
3568 if (((pred = insn_data[(int) icode].operand[0].predicate)
3569 && !((*pred) (x, mode))))
3570 x = force_reg (mode, x);
3571 emit_insn (GEN_FCN (icode) (x));
3574 if (GET_MODE_SIZE (mode) == rounded_size)
3575 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3578 #ifdef STACK_GROWS_DOWNWARD
3579 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3580 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3582 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3583 GEN_INT (rounded_size));
3585 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
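/* The resulting address has the form (illustrative, downward stack)

       (pre_modify (reg sp) (plus (reg sp) (const_int -rounded_size)))

   which both computes the new stack pointer and addresses the slot.  */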
3588 dest = gen_rtx_MEM (mode, dest_addr);
3592 set_mem_attributes (dest, type, 1);
3594 if (flag_optimize_sibling_calls)
3595 /* Function incoming arguments may overlap with sibling call
3596 outgoing arguments and we cannot allow reordering of reads
3597 from function arguments with stores to outgoing arguments
3598 of sibling calls. */
3599 set_mem_alias_set (dest, 0);
3601 emit_move_insn (dest, x);
3605 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3607 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3609 SIZE is an rtx for the size of data to be copied (in bytes),
3610 needed only if X is BLKmode.
3612 ALIGN (in bits) is the maximum alignment we can assume.
3614 If PARTIAL and REG are both nonzero, then copy that many of the first
3615 words of X into registers starting with REG, and push the rest of X.
3616 The amount of space pushed is decreased by PARTIAL words,
3617 rounded *down* to a multiple of PARM_BOUNDARY.
3618 REG must be a hard register in this case.
3619 If REG is zero but PARTIAL is not, take all other actions for an
3620 argument partially in registers, but do not actually load any registers.
3623 EXTRA is the amount in bytes of extra space to leave next to this arg.
3624 This is ignored if an argument block has already been allocated.
3626 On a machine that lacks real push insns, ARGS_ADDR is the address of
3627 the bottom of the argument block for this call. We use indexing off there
3628 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3629 argument block has not been preallocated.
3631 ARGS_SO_FAR is the size of args previously pushed for this call.
3633 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3634 for arguments passed in registers. If nonzero, it will be the number
3635 of bytes required. */
3638 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3639 args_addr, args_so_far, reg_parm_stack_space,
3642 enum machine_mode mode;
3651 int reg_parm_stack_space;
3655 enum direction stack_direction
3656 #ifdef STACK_GROWS_DOWNWARD
3662 /* Decide where to pad the argument: `downward' for below,
3663 `upward' for above, or `none' for don't pad it.
3664 Default is below for small data on big-endian machines; else above. */
3665 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3667 /* Invert direction if stack is post-decrement.
3669 if (STACK_PUSH_CODE == POST_DEC)
3670 if (where_pad != none)
3671 where_pad = (where_pad == downward ? upward : downward);
3673 xinner = x = protect_from_queue (x, 0);
3675 if (mode == BLKmode)
3677 /* Copy a block into the stack, entirely or partially. */
3680 int used = partial * UNITS_PER_WORD;
3681 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3689 /* USED is now the # of bytes we need not copy to the stack
3690 because registers will take care of them. */
3693 xinner = adjust_address (xinner, BLKmode, used);
3695 /* If the partial register-part of the arg counts in its stack size,
3696 skip the part of stack space corresponding to the registers.
3697 Otherwise, start copying to the beginning of the stack space,
3698 by setting SKIP to 0. */
3699 skip = (reg_parm_stack_space == 0) ? 0 : used;
3701 #ifdef PUSH_ROUNDING
3702 /* Do it with several push insns if that doesn't take lots of insns
3703 and if there is no difficulty with push insns that skip bytes
3704 on the stack for alignment purposes. */
3707 && GET_CODE (size) == CONST_INT
3709 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3710 /* Here we avoid the case of a structure whose weak alignment
3711 forces many pushes of a small amount of data,
3712 and such small pushes do rounding that causes trouble. */
3713 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3714 || align >= BIGGEST_ALIGNMENT
3715 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3716 == (align / BITS_PER_UNIT)))
3717 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3719 /* Push padding now if padding above and stack grows down,
3720 or if padding below and stack grows up.
3721 But if space already allocated, this has already been done. */
3722 if (extra && args_addr == 0
3723 && where_pad != none && where_pad != stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3726 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3729 #endif /* PUSH_ROUNDING */
3733 /* Otherwise make space on the stack and copy the data
3734 to the address of that space. */
3736 /* Deduct words put into registers from the size we must copy. */
3739 if (GET_CODE (size) == CONST_INT)
3740 size = GEN_INT (INTVAL (size) - used);
3742 size = expand_binop (GET_MODE (size), sub_optab, size,
3743 GEN_INT (used), NULL_RTX, 0,
3747 /* Get the address of the stack space.
3748 In this case, we do not deal with EXTRA separately.
3749 A single stack adjust will do. */
3752 temp = push_block (size, extra, where_pad == downward);
3755 else if (GET_CODE (args_so_far) == CONST_INT)
3756 temp = memory_address (BLKmode,
3757 plus_constant (args_addr,
3758 skip + INTVAL (args_so_far)));
3760 temp = memory_address (BLKmode,
3761 plus_constant (gen_rtx_PLUS (Pmode,
3766 if (!ACCUMULATE_OUTGOING_ARGS)
3768 /* If the source is referenced relative to the stack pointer,
3769 copy it to another register to stabilize it. We do not need
3770 to do this if we know that we won't be changing sp. */
3772 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3773 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3774 temp = copy_to_reg (temp);
3777 target = gen_rtx_MEM (BLKmode, temp);
3781 set_mem_attributes (target, type, 1);
3782 /* Function incoming arguments may overlap with sibling call
3783 outgoing arguments and we cannot allow reordering of reads
3784 from function arguments with stores to outgoing arguments
3785 of sibling calls. */
3786 set_mem_alias_set (target, 0);
3789 /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3790 PARM_BOUNDARY. Assume the caller isn't lying. */
3791 set_mem_align (target, align);
3793 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3796 else if (partial > 0)
3798 /* Scalar partly in registers. */
3800 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3803 /* # words of start of argument
3804 that we must make space for but need not store. */
3805 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3806 int args_offset = INTVAL (args_so_far);
3809 /* Push padding now if padding above and stack grows down,
3810 or if padding below and stack grows up.
3811 But if space already allocated, this has already been done. */
3812 if (extra && args_addr == 0
3813 && where_pad != none && where_pad != stack_direction)
3814 anti_adjust_stack (GEN_INT (extra));
3816 /* If we make space by pushing it, we might as well push
3817 the real data. Otherwise, we can leave OFFSET nonzero
3818 and leave the space uninitialized. */
3822 /* Now NOT_STACK gets the number of words that we don't need to
3823 allocate on the stack. */
3824 not_stack = partial - offset;
3826 /* If the partial register-part of the arg counts in its stack size,
3827 skip the part of stack space corresponding to the registers.
3828 Otherwise, start copying to the beginning of the stack space,
3829 by setting SKIP to 0. */
3830 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3832 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3833 x = validize_mem (force_const_mem (mode, x));
3835 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3836 SUBREGs of such registers are not allowed. */
3837 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3838 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3839 x = copy_to_reg (x);
3841 /* Loop over all the words allocated on the stack for this arg. */
3842 /* We can do it by words, because any scalar bigger than a word
3843 has a size that is a multiple of a word. */
3844 #ifndef PUSH_ARGS_REVERSED
3845 for (i = not_stack; i < size; i++)
3847 for (i = size - 1; i >= not_stack; i--)
3849 if (i >= not_stack + offset)
3850 emit_push_insn (operand_subword_force (x, i, mode),
3851 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3853 GEN_INT (args_offset + ((i - not_stack + skip)
3855 reg_parm_stack_space, alignment_pad);
3860 rtx target = NULL_RTX;
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3870 #ifdef PUSH_ROUNDING
3871 if (args_addr == 0 && PUSH_ARGS)
3872 emit_single_push_insn (mode, x, type);
3876 if (GET_CODE (args_so_far) == CONST_INT)
3878 = memory_address (mode,
3879 plus_constant (args_addr,
3880 INTVAL (args_so_far)));
3882 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3885 dest = gen_rtx_MEM (mode, addr);
3888 set_mem_attributes (dest, type, 1);
3889 /* Function incoming arguments may overlap with sibling call
3890 outgoing arguments and we cannot allow reordering of reads
3891 from function arguments with stores to outgoing arguments
3892 of sibling calls. */
3893 set_mem_alias_set (dest, 0);
3896 emit_move_insn (dest, x);
3900 /* If part should go in registers, copy that part
3901 into the appropriate registers. Do this now, at the end,
3902 since mem-to-mem copies above may do function calls. */
3903 if (partial > 0 && reg != 0)
3905 /* Handle calls that pass values in multiple non-contiguous locations.
3906 The Irix 6 ABI has examples of this. */
3907 if (GET_CODE (reg) == PARALLEL)
3908 emit_group_load (reg, x, -1); /* ??? size? */
3910 move_block_to_reg (REGNO (reg), x, partial, mode);
3913 if (extra && args_addr == 0 && where_pad == stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 if (alignment_pad && args_addr == 0)
3917 anti_adjust_stack (alignment_pad);
3920 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3928 /* Only registers can be subtargets. */
3929 || GET_CODE (x) != REG
3930 /* If the register is readonly, it can't be set more than once. */
3931 || RTX_UNCHANGING_P (x)
3932 /* Don't use hard regs to avoid extending their life. */
3933 || REGNO (x) < FIRST_PSEUDO_REGISTER
3934 /* Avoid subtargets inside loops,
3935 since they hide some invariant expressions. */
3936 || preserve_subexpressions_p ())
3940 /* Expand an assignment that stores the value of FROM into TO.
3941 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3942 (This may contain a QUEUED rtx;
3943 if the value is constant, this rtx is a constant.)
3944 Otherwise, the returned value is NULL_RTX.
3946 SUGGEST_REG is no longer actually used.
3947 It used to mean, copy the value through a register
3948 and return that register, if that is possible.
3949 We now use WANT_VALUE to decide whether to do this. */
3952 expand_assignment (to, from, want_value, suggest_reg)
3955 int suggest_reg ATTRIBUTE_UNUSED;
3960 /* Don't crash if the lhs of the assignment was erroneous. */
3962 if (TREE_CODE (to) == ERROR_MARK)
3964 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3965 return want_value ? result : NULL_RTX;
3968 /* Assignment of a structure component needs special treatment
3969 if the structure component's rtx is not simply a MEM.
3970 Assignment of an array element at a constant index, and assignment of
3971 an array element in an unaligned packed structure field, have the same problem. */
3974 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3975 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3977 enum machine_mode mode1;
3978 HOST_WIDE_INT bitsize, bitpos;
3986 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3987 &unsignedp, &volatilep);
3989 /* If we are going to use store_bit_field and extract_bit_field,
3990 make sure to_rtx will be safe for multiple use. */
3992 if (mode1 == VOIDmode && want_value)
3993 tem = stabilize_reference (tem);
3995 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3999 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4001 if (GET_CODE (to_rtx) != MEM)
4004 #ifdef POINTERS_EXTEND_UNSIGNED
4005 if (GET_MODE (offset_rtx) != Pmode)
4006 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4008 if (GET_MODE (offset_rtx) != ptr_mode)
4009 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4012 /* A constant address in TO_RTX can have VOIDmode; we must not try
4013 to call force_reg for that case, so avoid it. */
4014 if (GET_CODE (to_rtx) == MEM
4015 && GET_MODE (to_rtx) == BLKmode
4016 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4018 && (bitpos % bitsize) == 0
4019 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4020 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4022 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4026 to_rtx = offset_address (to_rtx, offset_rtx,
4027 highest_pow2_factor_for_type (TREE_TYPE (to),
4031 if (GET_CODE (to_rtx) == MEM)
4033 /* If the field is at offset zero, we could have been given the
4034 DECL_RTX of the parent struct. Don't munge it. */
4035 to_rtx = shallow_copy_rtx (to_rtx);
4037 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4040 /* Deal with volatile and readonly fields. The former is only done
4041 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4042 if (volatilep && GET_CODE (to_rtx) == MEM)
4044 if (to_rtx == orig_to_rtx)
4045 to_rtx = copy_rtx (to_rtx);
4046 MEM_VOLATILE_P (to_rtx) = 1;
4049 if (TREE_CODE (to) == COMPONENT_REF
4050 && TREE_READONLY (TREE_OPERAND (to, 1)))
4052 if (to_rtx == orig_to_rtx)
4053 to_rtx = copy_rtx (to_rtx);
4054 RTX_UNCHANGING_P (to_rtx) = 1;
4057 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4059 if (to_rtx == orig_to_rtx)
4060 to_rtx = copy_rtx (to_rtx);
4061 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4064 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4066 /* Spurious cast for HPUX compiler. */
4067 ? ((enum machine_mode)
4068 TYPE_MODE (TREE_TYPE (to)))
4070 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4072 preserve_temp_slots (result);
4076 /* If the value is meaningful, convert RESULT to the proper mode.
4077 Otherwise, return nothing. */
4078 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4079 TYPE_MODE (TREE_TYPE (from)),
4081 TREE_UNSIGNED (TREE_TYPE (to)))
4085 /* If the rhs is a function call and its value is not an aggregate,
4086 call the function before we start to compute the lhs.
4087 This is needed for correct code for cases such as
4088 val = setjmp (buf) on machines where reference to val
4089 requires loading up part of an address in a separate insn.
4091 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4092 since it might be a promoted variable where the zero- or sign- extension
4093 needs to be done. Handling this in the normal way is safe because no
4094 computation is done before the call. */
4095 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4096 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4097 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4098 && GET_CODE (DECL_RTL (to)) == REG))
4103 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4105 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4107 /* Handle calls that return values in multiple non-contiguous locations.
4108 The Irix 6 ABI has examples of this. */
4109 if (GET_CODE (to_rtx) == PARALLEL)
4110 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4111 else if (GET_MODE (to_rtx) == BLKmode)
4112 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4115 #ifdef POINTERS_EXTEND_UNSIGNED
4116 if (POINTER_TYPE_P (TREE_TYPE (to))
4117 && GET_MODE (to_rtx) != GET_MODE (value))
4118 value = convert_memory_address (GET_MODE (to_rtx), value);
4120 emit_move_insn (to_rtx, value);
4122 preserve_temp_slots (to_rtx);
4125 return want_value ? to_rtx : NULL_RTX;
4128 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4129 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4132 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4134 /* Don't move directly into a return register. */
4135 if (TREE_CODE (to) == RESULT_DECL
4136 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4141 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4143 if (GET_CODE (to_rtx) == PARALLEL)
4144 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4146 emit_move_insn (to_rtx, temp);
4148 preserve_temp_slots (to_rtx);
4151 return want_value ? to_rtx : NULL_RTX;
4154 /* In case we are returning the contents of an object which overlaps
4155 the place the value is being stored, use a safe function when copying
4156 a value through a pointer into a structure value return block. */
4157 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4158 && current_function_returns_struct
4159 && !current_function_returns_pcc_struct)
4164 size = expr_size (from);
4165 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4167 if (TARGET_MEM_FUNCTIONS)
4168 emit_library_call (memmove_libfunc, LCT_NORMAL,
4169 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4170 XEXP (from_rtx, 0), Pmode,
4171 convert_to_mode (TYPE_MODE (sizetype),
4172 size, TREE_UNSIGNED (sizetype)),
4173 TYPE_MODE (sizetype));
4175 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4176 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4177 XEXP (to_rtx, 0), Pmode,
4178 convert_to_mode (TYPE_MODE (integer_type_node),
4180 TREE_UNSIGNED (integer_type_node)),
4181 TYPE_MODE (integer_type_node));
4183 preserve_temp_slots (to_rtx);
4186 return want_value ? to_rtx : NULL_RTX;
4189 /* Compute FROM and store the value in the rtx we got. */
4192 result = store_expr (from, to_rtx, want_value);
4193 preserve_temp_slots (result);
4196 return want_value ? result : NULL_RTX;
4199 /* Generate code for computing expression EXP,
4200 and storing the value into TARGET.
4201 TARGET may contain a QUEUED rtx.
4203 If WANT_VALUE is nonzero, return a copy of the value
4204 not in TARGET, so that we can be sure to use the proper
4205 value in a containing expression even if TARGET has something
4206 else stored in it. If possible, we copy the value through a pseudo
4207 and return that pseudo. Or, if the value is constant, we try to
4208 return the constant. In some cases, we return a pseudo
4209 copied *from* TARGET.
4211 If the mode is BLKmode then we may return TARGET itself.
4212 It turns out that in BLKmode it doesn't cause a problem,
4213 because C has no operators that could combine two different
4214 assignments into the same BLKmode object with different values
4215 with no sequence point. Will other languages need this to be more thorough?
4218 If WANT_VALUE is 0, we return NULL, to make sure
4219 to catch quickly any cases where the caller uses the value
4220 and fails to set WANT_VALUE. */
4223 store_expr (exp, target, want_value)
4229 int dont_return_target = 0;
4230 int dont_store_target = 0;
4232 if (TREE_CODE (exp) == COMPOUND_EXPR)
4234 /* Perform the first part of the compound expression, then assign from the second part. */
4236 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4238 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4240 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4242 /* For conditional expression, get safe form of the target. Then
4243 test the condition, doing the appropriate assignment on either
4244 side. This avoids the creation of unnecessary temporaries.
4245 For non-BLKmode, it is more efficient not to do this. */
4247 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4250 target = protect_from_queue (target, 1);
4252 do_pending_stack_adjust ();
4254 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4255 start_cleanup_deferral ();
4256 store_expr (TREE_OPERAND (exp, 1), target, 0);
4257 end_cleanup_deferral ();
4259 emit_jump_insn (gen_jump (lab2));
4262 start_cleanup_deferral ();
4263 store_expr (TREE_OPERAND (exp, 2), target, 0);
4264 end_cleanup_deferral ();
4269 return want_value ? target : NULL_RTX;
4271 else if (queued_subexp_p (target))
4272 /* If target contains a postincrement, let's not risk
4273 using it as the place to generate the rhs. */
4275 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4277 /* Expand EXP into a new pseudo. */
4278 temp = gen_reg_rtx (GET_MODE (target));
4279 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4282 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4284 /* If target is volatile, ANSI requires accessing the value
4285 *from* the target, if it is accessed. So make that happen.
4286 In no case return the target itself. */
4287 if (! MEM_VOLATILE_P (target) && want_value)
4288 dont_return_target = 1;
4290 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4291 && GET_MODE (target) != BLKmode)
4292 /* If target is in memory and caller wants value in a register instead,
4293 arrange that. Pass TARGET as target for expand_expr so that,
4294 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4295 We know expand_expr will not use the target in that case.
4296 Don't do this if TARGET is volatile because we are supposed
4297 to write it and then read it. */
4299 temp = expand_expr (exp, target, GET_MODE (target), 0);
4300 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4302 /* If TEMP is already in the desired TARGET, only copy it from
4303 memory and don't store it there again. */
4305 || (rtx_equal_p (temp, target)
4306 && ! side_effects_p (temp) && ! side_effects_p (target)))
4307 dont_store_target = 1;
4308 temp = copy_to_reg (temp);
4310 dont_return_target = 1;
4312 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4313 /* If this is a scalar in a register that is stored in a wider mode
4314 than the declared mode, compute the result into its declared mode
4315 and then convert to the wider mode. Our value is the computed expression. */
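/* For instance (illustrative), a signed char variable promoted into an
   SImode register is represented as a QImode SUBREG of that register
   with SUBREG_PROMOTED_VAR_P set; the RHS is computed in QImode and
   then sign- or zero-extended into the full SImode register by the
   convert_move call below.  */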
4318 rtx inner_target = 0;
4320 /* If we don't want a value, we can do the conversion inside EXP,
4321 which will often result in some optimizations. Do the conversion
4322 in two steps: first change the signedness, if needed, then
4323 the extend. But don't do this if the type of EXP is a subtype
4324 of something else since then the conversion might involve
4325 more than just converting modes. */
4326 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4327 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4329 if (TREE_UNSIGNED (TREE_TYPE (exp))
4330 != SUBREG_PROMOTED_UNSIGNED_P (target))
4332 ((*lang_hooks.types.signed_or_unsigned_type)
4333 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4335 exp = convert ((*lang_hooks.types.type_for_mode)
4336 (GET_MODE (SUBREG_REG (target)),
4337 SUBREG_PROMOTED_UNSIGNED_P (target)),
4340 inner_target = SUBREG_REG (target);
4343 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4345 /* If TEMP is a volatile MEM and we want a result value, make
4346 the access now so it gets done only once. Likewise if
4347 it contains TARGET. */
4348 if (GET_CODE (temp) == MEM && want_value
4349 && (MEM_VOLATILE_P (temp)
4350 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4351 temp = copy_to_reg (temp);
4353 /* If TEMP is a VOIDmode constant, use convert_modes to make
4354 sure that we properly convert it. */
4355 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4358 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4359 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4360 GET_MODE (target), temp,
4361 SUBREG_PROMOTED_UNSIGNED_P (target));
4364 convert_move (SUBREG_REG (target), temp,
4365 SUBREG_PROMOTED_UNSIGNED_P (target));
4367 /* If we promoted a constant, change the mode back down to match
4368 target. Otherwise, the caller might get confused by a result whose
4369 mode is larger than expected. */
4371 if (want_value && GET_MODE (temp) != GET_MODE (target))
4373 if (GET_MODE (temp) != VOIDmode)
4375 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4376 SUBREG_PROMOTED_VAR_P (temp) = 1;
4377 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4378 SUBREG_PROMOTED_UNSIGNED_P (target));
4381 temp = convert_modes (GET_MODE (target),
4382 GET_MODE (SUBREG_REG (target)),
4383 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4386 return want_value ? temp : NULL_RTX;
4390 temp = expand_expr (exp, target, GET_MODE (target), 0);
4391 /* Return TARGET if it's a specified hardware register.
4392 If TARGET is a volatile mem ref, either return TARGET
4393 or return a reg copied *from* TARGET; ANSI requires this.
4395 Otherwise, if TEMP is not TARGET, return TEMP
4396 if it is constant (for efficiency),
4397 or if we really want the correct value. */
4398 if (!(target && GET_CODE (target) == REG
4399 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4400 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4401 && ! rtx_equal_p (temp, target)
4402 && (CONSTANT_P (temp) || want_value))
4403 dont_return_target = 1;
4406 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4407 the same as that of TARGET, adjust the constant. This is needed, for
4408 example, in case it is a CONST_DOUBLE and we want only a word-sized
4409 value. */
4410 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4411 && TREE_CODE (exp) != ERROR_MARK
4412 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4413 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4414 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4416 /* If value was not generated in the target, store it there.
4417 Convert the value to TARGET's type first if necessary.
4418 If TEMP and TARGET compare equal according to rtx_equal_p, but
4419 one or both of them are volatile memory refs, we have to distinguish
4420 two cases:
4421 - expand_expr has used TARGET. In this case, we must not generate
4422 another copy. This can be detected by TARGET being equal according
4423 to == .
4424 - expand_expr has not used TARGET - that means that the source just
4425 happens to have the same RTX form. Since temp will have been created
4426 by expand_expr, it will compare unequal according to == .
4427 We must generate a copy in this case, to reach the correct number
4428 of volatile memory references. */
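/* Concretely: for "v = v" with V a volatile int, the load and the store
   use distinct MEMs that are rtx_equal_p but not ==, so the copy below
   is still emitted and both accesses happen. Had expand_expr used
   TARGET itself, the rtxes would be == and no copy would be needed. */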
4430 if ((! rtx_equal_p (temp, target)
4431 || (temp != target && (side_effects_p (temp)
4432 || side_effects_p (target))))
4433 && TREE_CODE (exp) != ERROR_MARK
4434 && ! dont_store_target
4435 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4436 but TARGET is not a valid memory reference, TEMP will differ
4437 from TARGET although it is really the same location. */
4438 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4439 || target != DECL_RTL_IF_SET (exp))
4440 /* If there's nothing to copy, don't bother. Don't call expr_size
4441 unless necessary, because some front ends' (e.g. C++) expr_size hook
4442 aborts on objects that are not supposed to be bit-copied or
4443 copied by pieces. */
4444 && expr_size (exp) != const0_rtx)
4446 target = protect_from_queue (target, 1);
4447 if (GET_MODE (temp) != GET_MODE (target)
4448 && GET_MODE (temp) != VOIDmode)
4450 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4451 if (dont_return_target)
4453 /* In this case, we will return TEMP,
4454 so make sure it has the proper mode.
4455 But don't forget to store the value into TARGET. */
4456 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4457 emit_move_insn (target, temp);
4460 convert_move (target, temp, unsignedp);
4463 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4465 /* Handle copying a string constant into an array. The string
4466 constant may be shorter than the array. So copy just the string's
4467 actual length, and clear the rest. First get the size of the data
4468 type of the string, which is actually the size of the target. */
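/* For example, "char buf[8] = "hi";" copies the three string bytes
   (including the terminating null) and then clears the remaining five
   bytes of BUF. */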
4469 rtx size = expr_size (exp);
4471 if (GET_CODE (size) == CONST_INT
4472 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4473 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4476 /* Compute the size of the data to copy from the string. */
4478 = size_binop (MIN_EXPR,
4479 make_tree (sizetype, size),
4480 size_int (TREE_STRING_LENGTH (exp)));
4481 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4485 /* Copy that much. */
4486 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4487 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4489 /* Figure out how much is left in TARGET that we have to clear.
4490 Do all calculations in ptr_mode. */
4491 if (GET_CODE (copy_size_rtx) == CONST_INT)
4493 size = plus_constant (size, -INTVAL (copy_size_rtx));
4494 target = adjust_address (target, BLKmode,
4495 INTVAL (copy_size_rtx));
4499 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4500 copy_size_rtx, NULL_RTX, 0,
4503 #ifdef POINTERS_EXTEND_UNSIGNED
4504 if (GET_MODE (copy_size_rtx) != Pmode)
4505 copy_size_rtx = convert_memory_address (Pmode,
4509 target = offset_address (target, copy_size_rtx,
4510 highest_pow2_factor (copy_size));
4511 label = gen_label_rtx ();
4512 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4513 GET_MODE (size), 0, label);
4516 if (size != const0_rtx)
4517 clear_storage (target, size);
4523 /* Handle calls that return values in multiple non-contiguous locations.
4524 The Irix 6 ABI has examples of this. */
4525 else if (GET_CODE (target) == PARALLEL)
4526 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4527 else if (GET_MODE (temp) == BLKmode)
4528 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4530 emit_move_insn (target, temp);
4533 /* If we don't want a value, return NULL_RTX. */
4537 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4538 ??? The latter test doesn't seem to make sense. */
4539 else if (dont_return_target && GET_CODE (temp) != MEM)
4542 /* Return TARGET itself if it is a hard register. */
4543 else if (want_value && GET_MODE (target) != BLKmode
4544 && ! (GET_CODE (target) == REG
4545 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4546 return copy_to_reg (target);
4552 /* Return 1 if EXP just contains zeros. */
4560 switch (TREE_CODE (exp))
4564 case NON_LVALUE_EXPR:
4565 case VIEW_CONVERT_EXPR:
4566 return is_zeros_p (TREE_OPERAND (exp, 0));
4569 return integer_zerop (exp);
4573 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4576 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4579 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4580 elt = TREE_CHAIN (elt))
4581 if (!is_zeros_p (TREE_VALUE (elt)))
4587 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4588 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4589 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4590 if (! is_zeros_p (TREE_VALUE (elt)))
4600 /* Return 1 if EXP contains mostly (3/4) zeros. */
4603 mostly_zeros_p (exp)
4606 if (TREE_CODE (exp) == CONSTRUCTOR)
4608 int elts = 0, zeros = 0;
4609 tree elt = CONSTRUCTOR_ELTS (exp);
4610 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4612 /* If there are no ranges of true bits, it is all zero. */
4613 return elt == NULL_TREE;
4615 for (; elt; elt = TREE_CHAIN (elt))
4617 /* We do not handle the case where the index is a RANGE_EXPR,
4618 so the statistic will be somewhat inaccurate.
4619 We do make a more accurate count in store_constructor itself,
4620 so since this function is only used for nested array elements,
4621 this should be close enough. */
4622 if (mostly_zeros_p (TREE_VALUE (elt)))
4627 return 4 * zeros >= 3 * elts;
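/* For example, a CONSTRUCTOR for {0, 0, 0, 5} gives zeros == 3 and
   elts == 4; since 4 * 3 >= 3 * 4, it is considered mostly zero. */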
4630 return is_zeros_p (exp);
4633 /* Helper function for store_constructor.
4634 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4635 TYPE is the type of the CONSTRUCTOR, not the element type.
4636 CLEARED is as for store_constructor.
4637 ALIAS_SET is the alias set to use for any stores.
4639 This provides a recursive shortcut back to store_constructor when it isn't
4640 necessary to go through store_field. This is so that we can pass through
4641 the cleared field to let store_constructor know that we may not have to
4642 clear a substructure if the outer structure has already been cleared. */
4645 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4648 unsigned HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
4650 enum machine_mode mode;
4655 if (TREE_CODE (exp) == CONSTRUCTOR
4656 && bitpos % BITS_PER_UNIT == 0
4657 /* If we have a nonzero bitpos for a register target, then we just
4658 let store_field do the bitfield handling. This is unlikely to
4659 generate unnecessary clear instructions anyway. */
4660 && (bitpos == 0 || GET_CODE (target) == MEM))
4662 if (GET_CODE (target) == MEM)
4664 = adjust_address (target,
4665 GET_MODE (target) == BLKmode
4667 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4668 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4671 /* Update the alias set, if required. */
4672 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4673 && MEM_ALIAS_SET (target) != 0)
4675 target = copy_rtx (target);
4676 set_mem_alias_set (target, alias_set);
4679 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4682 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4686 /* Store the value of constructor EXP into the rtx TARGET.
4687 TARGET is either a REG or a MEM; we know it cannot conflict, since
4688 safe_from_p has been called.
4689 CLEARED is true if TARGET is known to have been zero'd.
4690 SIZE is the number of bytes of TARGET we are allowed to modify: this
4691 may not be the same as the size of EXP if we are assigning to a field
4692 which has been packed to exclude padding bits. */
4695 store_constructor (exp, target, cleared, size)
4701 tree type = TREE_TYPE (exp);
4702 #ifdef WORD_REGISTER_OPERATIONS
4703 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4706 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4707 || TREE_CODE (type) == QUAL_UNION_TYPE)
4711 /* We either clear the aggregate or indicate the value is dead. */
4712 if ((TREE_CODE (type) == UNION_TYPE
4713 || TREE_CODE (type) == QUAL_UNION_TYPE)
4715 && ! CONSTRUCTOR_ELTS (exp))
4716 /* If the constructor is empty, clear the union. */
4718 clear_storage (target, expr_size (exp));
4722 /* If we are building a static constructor into a register,
4723 set the initial value as zero so we can fold the value into
4724 a constant. But if more than one register is involved,
4725 this probably loses. */
4726 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4727 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4729 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4733 /* If the constructor has fewer fields than the structure
4734 or if we are initializing the structure to mostly zeros,
4735 clear the whole structure first. Don't do this if TARGET is a
4736 register whose mode size isn't equal to SIZE since clear_storage
4737 can't handle this case. */
4738 else if (! cleared && size > 0
4739 && ((list_length (CONSTRUCTOR_ELTS (exp))
4740 != fields_length (type))
4741 || mostly_zeros_p (exp))
4742 && (GET_CODE (target) != REG
4743 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4746 clear_storage (target, GEN_INT (size));
4751 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4753 /* Store each element of the constructor into
4754 the corresponding field of TARGET. */
4756 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4758 tree field = TREE_PURPOSE (elt);
4759 tree value = TREE_VALUE (elt);
4760 enum machine_mode mode;
4761 HOST_WIDE_INT bitsize;
4762 HOST_WIDE_INT bitpos = 0;
4765 rtx to_rtx = target;
4767 /* Just ignore missing fields.
4768 We cleared the whole structure, above,
4769 if any fields are missing. */
4773 if (cleared && is_zeros_p (value))
4776 if (host_integerp (DECL_SIZE (field), 1))
4777 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4781 unsignedp = TREE_UNSIGNED (field);
4782 mode = DECL_MODE (field);
4783 if (DECL_BIT_FIELD (field))
4786 offset = DECL_FIELD_OFFSET (field);
4787 if (host_integerp (offset, 0)
4788 && host_integerp (bit_position (field), 0))
4790 bitpos = int_bit_position (field);
4794 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4800 if (contains_placeholder_p (offset))
4801 offset = build (WITH_RECORD_EXPR, sizetype,
4802 offset, make_tree (TREE_TYPE (exp), target));
4804 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4805 if (GET_CODE (to_rtx) != MEM)
4808 #ifdef POINTERS_EXTEND_UNSIGNED
4809 if (GET_MODE (offset_rtx) != Pmode)
4810 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4812 if (GET_MODE (offset_rtx) != ptr_mode)
4813 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4816 to_rtx = offset_address (to_rtx, offset_rtx,
4817 highest_pow2_factor (offset));
4820 if (TREE_READONLY (field))
4822 if (GET_CODE (to_rtx) == MEM)
4823 to_rtx = copy_rtx (to_rtx);
4825 RTX_UNCHANGING_P (to_rtx) = 1;
4828 #ifdef WORD_REGISTER_OPERATIONS
4829 /* If this initializes a field that is smaller than a word, at the
4830 start of a word, try to widen it to a full word.
4831 This special case allows us to output C++ member function
4832 initializations in a form that the optimizers can understand. */
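/* For example, initializing a char-sized field at bit 0 of a register
   target with a constant is widened below to a word-mode store
   (shifted into place first on big-endian machines). */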
4833 if (GET_CODE (target) == REG
4834 && bitsize < BITS_PER_WORD
4835 && bitpos % BITS_PER_WORD == 0
4836 && GET_MODE_CLASS (mode) == MODE_INT
4837 && TREE_CODE (value) == INTEGER_CST
4839 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4841 tree type = TREE_TYPE (value);
4843 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4845 type = (*lang_hooks.types.type_for_size)
4846 (BITS_PER_WORD, TREE_UNSIGNED (type));
4847 value = convert (type, value);
4850 if (BYTES_BIG_ENDIAN)
4852 = fold (build (LSHIFT_EXPR, type, value,
4853 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4854 bitsize = BITS_PER_WORD;
4859 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4860 && DECL_NONADDRESSABLE_P (field))
4862 to_rtx = copy_rtx (to_rtx);
4863 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4866 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4867 value, type, cleared,
4868 get_alias_set (TREE_TYPE (field)));
4871 else if (TREE_CODE (type) == ARRAY_TYPE
4872 || TREE_CODE (type) == VECTOR_TYPE)
4877 tree domain = TYPE_DOMAIN (type);
4878 tree elttype = TREE_TYPE (type);
4880 HOST_WIDE_INT minelt = 0;
4881 HOST_WIDE_INT maxelt = 0;
4883 /* Vectors are like arrays, but the domain is stored via an array
4884 type indirectly. */
4885 if (TREE_CODE (type) == VECTOR_TYPE)
4887 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4888 the same field as TYPE_DOMAIN, we are not guaranteed that
4889 it always will. */
4890 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4891 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4894 const_bounds_p = (TYPE_MIN_VALUE (domain)
4895 && TYPE_MAX_VALUE (domain)
4896 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4897 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4899 /* If we have constant bounds for the range of the type, get them. */
4902 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4903 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4906 /* If the constructor has fewer elements than the array,
4907 clear the whole array first. Similarly if this is
4908 a static constructor of a non-BLKmode object. */
4909 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4913 HOST_WIDE_INT count = 0, zero_count = 0;
4914 need_to_clear = ! const_bounds_p;
4916 /* This loop is a more accurate version of the loop in
4917 mostly_zeros_p (it handles RANGE_EXPR in an index).
4918 It is also needed to check for missing elements. */
4919 for (elt = CONSTRUCTOR_ELTS (exp);
4920 elt != NULL_TREE && ! need_to_clear;
4921 elt = TREE_CHAIN (elt))
4923 tree index = TREE_PURPOSE (elt);
4924 HOST_WIDE_INT this_node_count;
4926 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4928 tree lo_index = TREE_OPERAND (index, 0);
4929 tree hi_index = TREE_OPERAND (index, 1);
4931 if (! host_integerp (lo_index, 1)
4932 || ! host_integerp (hi_index, 1))
4938 this_node_count = (tree_low_cst (hi_index, 1)
4939 - tree_low_cst (lo_index, 1) + 1);
4942 this_node_count = 1;
4944 count += this_node_count;
4945 if (mostly_zeros_p (TREE_VALUE (elt)))
4946 zero_count += this_node_count;
4949 /* Clear the entire array first if there are any missing elements,
4950 or if the incidence of zero elements is >= 75%. */
4952 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
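/* E.g. 8 zero elements out of 10 gives 4 * 8 >= 3 * 10, so the whole
   array is cleared first and only the nonzero elements are stored
   individually afterwards. */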
4956 if (need_to_clear && size > 0)
4961 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4963 clear_storage (target, GEN_INT (size));
4967 else if (REG_P (target))
4968 /* Inform later passes that the old value is dead. */
4969 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4971 /* Store each element of the constructor into
4972 the corresponding element of TARGET, determined
4973 by counting the elements. */
4974 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4976 elt = TREE_CHAIN (elt), i++)
4978 enum machine_mode mode;
4979 HOST_WIDE_INT bitsize;
4980 HOST_WIDE_INT bitpos;
4982 tree value = TREE_VALUE (elt);
4983 tree index = TREE_PURPOSE (elt);
4984 rtx xtarget = target;
4986 if (cleared && is_zeros_p (value))
4989 unsignedp = TREE_UNSIGNED (elttype);
4990 mode = TYPE_MODE (elttype);
4991 if (mode == BLKmode)
4992 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4993 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4996 bitsize = GET_MODE_BITSIZE (mode);
4998 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5000 tree lo_index = TREE_OPERAND (index, 0);
5001 tree hi_index = TREE_OPERAND (index, 1);
5002 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5003 struct nesting *loop;
5004 HOST_WIDE_INT lo, hi, count;
5007 /* If the range is constant and "small", unroll the loop. */
5009 && host_integerp (lo_index, 0)
5010 && host_integerp (hi_index, 0)
5011 && (lo = tree_low_cst (lo_index, 0),
5012 hi = tree_low_cst (hi_index, 0),
5013 count = hi - lo + 1,
5014 (GET_CODE (target) != MEM
5016 || (host_integerp (TYPE_SIZE (elttype), 1)
5017 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5020 lo -= minelt; hi -= minelt;
5021 for (; lo <= hi; lo++)
5023 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5025 if (GET_CODE (target) == MEM
5026 && !MEM_KEEP_ALIAS_SET_P (target)
5027 && TREE_CODE (type) == ARRAY_TYPE
5028 && TYPE_NONALIASED_COMPONENT (type))
5030 target = copy_rtx (target);
5031 MEM_KEEP_ALIAS_SET_P (target) = 1;
5034 store_constructor_field
5035 (target, bitsize, bitpos, mode, value, type, cleared,
5036 get_alias_set (elttype));
5041 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5042 loop_top = gen_label_rtx ();
5043 loop_end = gen_label_rtx ();
5045 unsignedp = TREE_UNSIGNED (domain);
5047 index = build_decl (VAR_DECL, NULL_TREE, domain);
5050 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5052 SET_DECL_RTL (index, index_r);
5053 if (TREE_CODE (value) == SAVE_EXPR
5054 && SAVE_EXPR_RTL (value) == 0)
5056 /* Make sure value gets expanded once before the
5057 loop. */
5058 expand_expr (value, const0_rtx, VOIDmode, 0);
5061 store_expr (lo_index, index_r, 0);
5062 loop = expand_start_loop (0);
5064 /* Assign value to element index. */
5066 = convert (ssizetype,
5067 fold (build (MINUS_EXPR, TREE_TYPE (index),
5068 index, TYPE_MIN_VALUE (domain))));
5069 position = size_binop (MULT_EXPR, position,
5071 TYPE_SIZE_UNIT (elttype)));
5073 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5074 xtarget = offset_address (target, pos_rtx,
5075 highest_pow2_factor (position));
5076 xtarget = adjust_address (xtarget, mode, 0);
5077 if (TREE_CODE (value) == CONSTRUCTOR)
5078 store_constructor (value, xtarget, cleared,
5079 bitsize / BITS_PER_UNIT);
5081 store_expr (value, xtarget, 0);
5083 expand_exit_loop_if_false (loop,
5084 build (LT_EXPR, integer_type_node,
5087 expand_increment (build (PREINCREMENT_EXPR,
5089 index, integer_one_node), 0, 0);
5091 emit_label (loop_end);
5094 else if ((index != 0 && ! host_integerp (index, 0))
5095 || ! host_integerp (TYPE_SIZE (elttype), 1))
5100 index = ssize_int (1);
5103 index = convert (ssizetype,
5104 fold (build (MINUS_EXPR, index,
5105 TYPE_MIN_VALUE (domain))));
5107 position = size_binop (MULT_EXPR, index,
5109 TYPE_SIZE_UNIT (elttype)));
5110 xtarget = offset_address (target,
5111 expand_expr (position, 0, VOIDmode, 0),
5112 highest_pow2_factor (position));
5113 xtarget = adjust_address (xtarget, mode, 0);
5114 store_expr (value, xtarget, 0);
5119 bitpos = ((tree_low_cst (index, 0) - minelt)
5120 * tree_low_cst (TYPE_SIZE (elttype), 1));
5122 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5124 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5125 && TREE_CODE (type) == ARRAY_TYPE
5126 && TYPE_NONALIASED_COMPONENT (type))
5128 target = copy_rtx (target);
5129 MEM_KEEP_ALIAS_SET_P (target) = 1;
5132 store_constructor_field (target, bitsize, bitpos, mode, value,
5133 type, cleared, get_alias_set (elttype));
5139 /* Set constructor assignments. */
5140 else if (TREE_CODE (type) == SET_TYPE)
5142 tree elt = CONSTRUCTOR_ELTS (exp);
5143 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5144 tree domain = TYPE_DOMAIN (type);
5145 tree domain_min, domain_max, bitlength;
5147 /* The default implementation strategy is to extract the constant
5148 parts of the constructor, use that to initialize the target,
5149 and then "or" in whatever non-constant ranges we need in addition.
5151 If a large set is all zero or all ones, it is
5152 probably better to set it using memset (if available) or bzero.
5153 Also, if a large set has just a single range, it may also be
5154 better to first clear the whole set (using
5155 bzero/memset), and then set the bits we want. */
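/* For example, for a set constructor such as [1..3, N..M], the
   constant range 1..3 is folded into the word image stored below,
   while the variable range N..M is set afterwards through the
   memset/__setbits calls in the loop at the end. */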
5157 /* Check for all zeros. */
5158 if (elt == NULL_TREE && size > 0)
5161 clear_storage (target, GEN_INT (size));
5165 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5166 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5167 bitlength = size_binop (PLUS_EXPR,
5168 size_diffop (domain_max, domain_min),
5171 nbits = tree_low_cst (bitlength, 1);
5173 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5174 are "complicated" (more than one range), initialize (the
5175 constant parts) by copying from a constant. */
5176 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5177 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5179 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5180 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5181 char *bit_buffer = (char *) alloca (nbits);
5182 HOST_WIDE_INT word = 0;
5183 unsigned int bit_pos = 0;
5184 unsigned int ibit = 0;
5185 unsigned int offset = 0; /* In bytes from beginning of set. */
5187 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5190 if (bit_buffer[ibit])
5192 if (BYTES_BIG_ENDIAN)
5193 word |= (1 << (set_word_size - 1 - bit_pos));
5195 word |= 1 << bit_pos;
5199 if (bit_pos >= set_word_size || ibit == nbits)
5201 if (word != 0 || ! cleared)
5203 rtx datum = GEN_INT (word);
5206 /* The assumption here is that it is safe to use
5207 XEXP if the set is multi-word, but not if
5208 it's single-word. */
5209 if (GET_CODE (target) == MEM)
5210 to_rtx = adjust_address (target, mode, offset);
5211 else if (offset == 0)
5215 emit_move_insn (to_rtx, datum);
5222 offset += set_word_size / BITS_PER_UNIT;
5227 /* Don't bother clearing storage if the set is all ones. */
5228 if (TREE_CHAIN (elt) != NULL_TREE
5229 || (TREE_PURPOSE (elt) == NULL_TREE
5231 : ( ! host_integerp (TREE_VALUE (elt), 0)
5232 || ! host_integerp (TREE_PURPOSE (elt), 0)
5233 || (tree_low_cst (TREE_VALUE (elt), 0)
5234 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5235 != (HOST_WIDE_INT) nbits))))
5236 clear_storage (target, expr_size (exp));
5238 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5240 /* Start of range of element or NULL. */
5241 tree startbit = TREE_PURPOSE (elt);
5242 /* End of range of element, or element value. */
5243 tree endbit = TREE_VALUE (elt);
5244 HOST_WIDE_INT startb, endb;
5245 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5247 bitlength_rtx = expand_expr (bitlength,
5248 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5250 /* Handle non-range tuple element like [ expr ]. */
5251 if (startbit == NULL_TREE)
5253 startbit = save_expr (endbit);
5257 startbit = convert (sizetype, startbit);
5258 endbit = convert (sizetype, endbit);
5259 if (! integer_zerop (domain_min))
5261 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5262 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5264 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5265 EXPAND_CONST_ADDRESS);
5266 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5267 EXPAND_CONST_ADDRESS);
5273 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5274 (GET_MODE (target), 0),
5277 emit_move_insn (targetx, target);
5280 else if (GET_CODE (target) == MEM)
5285 /* Optimization: If startbit and endbit are constants divisible
5286 by BITS_PER_UNIT, call memset instead. */
5287 if (TARGET_MEM_FUNCTIONS
5288 && TREE_CODE (startbit) == INTEGER_CST
5289 && TREE_CODE (endbit) == INTEGER_CST
5290 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5291 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5293 emit_library_call (memset_libfunc, LCT_NORMAL,
5295 plus_constant (XEXP (targetx, 0),
5296 startb / BITS_PER_UNIT),
5298 constm1_rtx, TYPE_MODE (integer_type_node),
5299 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5300 TYPE_MODE (sizetype));
5303 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5304 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5305 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5306 startbit_rtx, TYPE_MODE (sizetype),
5307 endbit_rtx, TYPE_MODE (sizetype));
5310 emit_move_insn (target, targetx);
5318 /* Store the value of EXP (an expression tree)
5319 into a subfield of TARGET which has mode MODE and occupies
5320 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5321 If MODE is VOIDmode, it means that we are storing into a bit-field.
5323 If VALUE_MODE is VOIDmode, return nothing in particular.
5324 UNSIGNEDP is not used in this case.
5326 Otherwise, return an rtx for the value stored. This rtx
5327 has mode VALUE_MODE if that is convenient to do.
5328 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5330 TYPE is the type of the underlying object,
5332 ALIAS_SET is the alias set for the destination. This value will
5333 (in general) be different from that for TARGET, since TARGET is a
5334 reference to the containing structure. */
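/* For example, storing into B of "struct { int a : 5; int b : 3; } x;"
   typically arrives here with BITSIZE == 3, BITPOS == 5 (depending on
   layout) and MODE == VOIDmode, and takes the store_bit_field path
   below. */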
5337 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5340 HOST_WIDE_INT bitsize;
5341 HOST_WIDE_INT bitpos;
5342 enum machine_mode mode;
5344 enum machine_mode value_mode;
5349 HOST_WIDE_INT width_mask = 0;
5351 if (TREE_CODE (exp) == ERROR_MARK)
5354 /* If we have nothing to store, do nothing unless the expression has
5355 side-effects. */
5356 if (bitsize == 0)
5357 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5358 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5359 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5361 /* If we are storing into an unaligned field of an aligned union that is
5362 in a register, we may have the mode of TARGET being an integer mode but
5363 MODE == BLKmode. In that case, get an aligned object whose size and
5364 alignment are the same as TARGET and store TARGET into it (we can avoid
5365 the store if the field being stored is the entire width of TARGET). Then
5366 call ourselves recursively to store the field into a BLKmode version of
5367 that object. Finally, load from the object into TARGET. This is not
5368 very efficient in general, but should only be slightly more expensive
5369 than the otherwise-required unaligned accesses. Perhaps this can be
5370 cleaned up later. */
5373 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5377 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5379 rtx blk_object = adjust_address (object, BLKmode, 0);
5381 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5382 emit_move_insn (object, target);
5384 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5387 emit_move_insn (target, object);
5389 /* We want to return the BLKmode version of the data. */
5393 if (GET_CODE (target) == CONCAT)
5395 /* We're storing into a struct containing a single __complex. */
5399 return store_expr (exp, target, 0);
5402 /* If the structure is in a register or if the component
5403 is a bit field, we cannot use addressing to access it.
5404 Use bit-field techniques or SUBREG to store in it. */
5406 if (mode == VOIDmode
5407 || (mode != BLKmode && ! direct_store[(int) mode]
5408 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5409 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5410 || GET_CODE (target) == REG
5411 || GET_CODE (target) == SUBREG
5412 /* If the field isn't aligned enough to store as an ordinary memref,
5413 store it as a bit field. */
5414 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5415 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5416 || bitpos % GET_MODE_ALIGNMENT (mode)))
5417 /* If the RHS and field are a constant size and the size of the
5418 RHS isn't the same size as the bitfield, we must use bitfield
5419 operations. */
5420 || (bitsize >= 0
5421 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5422 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5424 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5426 /* If BITSIZE is narrower than the size of the type of EXP
5427 we will be narrowing TEMP. Normally, what's wanted are the
5428 low-order bits. However, if EXP's type is a record and this is
5429 a big-endian machine, we want the upper BITSIZE bits. */
5430 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5431 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5432 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5433 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5434 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5438 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5439 MODE. */
5440 if (mode != VOIDmode && mode != BLKmode
5441 && mode != TYPE_MODE (TREE_TYPE (exp)))
5442 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5444 /* If the modes of TARGET and TEMP are both BLKmode, both
5445 must be in memory and BITPOS must be aligned on a byte
5446 boundary. If so, we simply do a block copy. */
5447 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5449 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5450 || bitpos % BITS_PER_UNIT != 0)
5453 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5454 emit_block_move (target, temp,
5455 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5459 return value_mode == VOIDmode ? const0_rtx : target;
5462 /* Store the value in the bitfield. */
5463 store_bit_field (target, bitsize, bitpos, mode, temp,
5464 int_size_in_bytes (type));
5466 if (value_mode != VOIDmode)
5468 /* The caller wants an rtx for the value.
5469 If possible, avoid refetching from the bitfield itself. */
5471 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5474 enum machine_mode tmode;
5476 tmode = GET_MODE (temp);
5477 if (tmode == VOIDmode)
5481 return expand_and (tmode, temp,
5482 gen_int_mode (width_mask, tmode),
5485 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5486 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5487 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5490 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5491 NULL_RTX, value_mode, VOIDmode,
5492 int_size_in_bytes (type));
5498 rtx addr = XEXP (target, 0);
5499 rtx to_rtx = target;
5501 /* If a value is wanted, it must be the lhs;
5502 so make the address stable for multiple use. */
5504 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5505 && ! CONSTANT_ADDRESS_P (addr)
5506 /* A frame-pointer reference is already stable. */
5507 && ! (GET_CODE (addr) == PLUS
5508 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5509 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5510 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5511 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5513 /* Now build a reference to just the desired component. */
5515 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5517 if (to_rtx == target)
5518 to_rtx = copy_rtx (to_rtx);
5520 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5521 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5522 set_mem_alias_set (to_rtx, alias_set);
5524 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5528 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5529 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5530 codes and find the ultimate containing object, which we return.
5532 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5533 bit position, and *PUNSIGNEDP to the signedness of the field.
5534 If the position of the field is variable, we store a tree
5535 giving the variable offset (in units) in *POFFSET.
5536 This offset is in addition to the bit position.
5537 If the position is not variable, we store 0 in *POFFSET.
5539 If any of the extraction expressions is volatile,
5540 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5542 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5543 is a mode that can be used to access the field. In that case, *PBITSIZE
5544 is redundant.
5546 If the field describes a variable-sized object, *PMODE is set to
5547 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5548 this case, but the address of the object can be found. */
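/* E.g. for "a.b[i].c" this returns A, accumulating the constant bit
   offsets of B and C into *PBITPOS and the variable contribution of
   the index I, scaled by the element size, into *POFFSET. */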
5551 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5552 punsignedp, pvolatilep)
5554 HOST_WIDE_INT *pbitsize;
5555 HOST_WIDE_INT *pbitpos;
5557 enum machine_mode *pmode;
5562 enum machine_mode mode = VOIDmode;
5563 tree offset = size_zero_node;
5564 tree bit_offset = bitsize_zero_node;
5565 tree placeholder_ptr = 0;
5568 /* First get the mode, signedness, and size. We do this from just the
5569 outermost expression. */
5570 if (TREE_CODE (exp) == COMPONENT_REF)
5572 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5573 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5574 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5576 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5578 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5580 size_tree = TREE_OPERAND (exp, 1);
5581 *punsignedp = TREE_UNSIGNED (exp);
5585 mode = TYPE_MODE (TREE_TYPE (exp));
5586 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5588 if (mode == BLKmode)
5589 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5591 *pbitsize = GET_MODE_BITSIZE (mode);
5596 if (! host_integerp (size_tree, 1))
5597 mode = BLKmode, *pbitsize = -1;
5599 *pbitsize = tree_low_cst (size_tree, 1);
5602 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5603 and find the ultimate containing object. */
5606 if (TREE_CODE (exp) == BIT_FIELD_REF)
5607 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5608 else if (TREE_CODE (exp) == COMPONENT_REF)
5610 tree field = TREE_OPERAND (exp, 1);
5611 tree this_offset = DECL_FIELD_OFFSET (field);
5613 /* If this field hasn't been filled in yet, don't go
5614 past it. This should only happen when folding expressions
5615 made during type construction. */
5616 if (this_offset == 0)
5618 else if (! TREE_CONSTANT (this_offset)
5619 && contains_placeholder_p (this_offset))
5620 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5622 offset = size_binop (PLUS_EXPR, offset, this_offset);
5623 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5624 DECL_FIELD_BIT_OFFSET (field));
5626 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5629 else if (TREE_CODE (exp) == ARRAY_REF
5630 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5632 tree index = TREE_OPERAND (exp, 1);
5633 tree array = TREE_OPERAND (exp, 0);
5634 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5635 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5636 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5638 /* We assume all arrays have sizes that are a multiple of a byte.
5639 First subtract the lower bound, if any, in the type of the
5640 index, then convert to sizetype and multiply by the size of the
5641 array element. */
5642 if (low_bound != 0 && ! integer_zerop (low_bound))
5643 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5646 /* If the index has a self-referential type, pass it to a
5647 WITH_RECORD_EXPR; if the component size does, pass our
5648 component to one. */
5649 if (! TREE_CONSTANT (index)
5650 && contains_placeholder_p (index))
5651 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5652 if (! TREE_CONSTANT (unit_size)
5653 && contains_placeholder_p (unit_size))
5654 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5656 offset = size_binop (PLUS_EXPR, offset,
5657 size_binop (MULT_EXPR,
5658 convert (sizetype, index),
5662 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5664 tree new = find_placeholder (exp, &placeholder_ptr);
5666 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5667 We might have been called from tree optimization where we
5668 haven't set up an object yet. */
5676 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5677 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5678 && ! ((TREE_CODE (exp) == NOP_EXPR
5679 || TREE_CODE (exp) == CONVERT_EXPR)
5680 && (TYPE_MODE (TREE_TYPE (exp))
5681 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5684 /* If any reference in the chain is volatile, the effect is volatile. */
5685 if (TREE_THIS_VOLATILE (exp))
5688 exp = TREE_OPERAND (exp, 0);
5691 /* If OFFSET is constant, see if we can return the whole thing as a
5692 constant bit position. Otherwise, split it up. */
5693 if (host_integerp (offset, 0)
5694 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5696 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5697 && host_integerp (tem, 0))
5698 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5700 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5706 /* Return 1 if T is an expression that get_inner_reference handles. */
5709 handled_component_p (t)
5712 switch (TREE_CODE (t))
5717 case ARRAY_RANGE_REF:
5718 case NON_LVALUE_EXPR:
5719 case VIEW_CONVERT_EXPR:
5724 return (TYPE_MODE (TREE_TYPE (t))
5725 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5732 /* Given an rtx VALUE that may contain additions and multiplications, return
5733 an equivalent value that just refers to a register, memory, or constant.
5734 This is done by generating instructions to perform the arithmetic and
5735 returning a pseudo-register containing the value.
5737 The returned value may be a REG, SUBREG, MEM or constant. */
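/* E.g. given (plus:SI (reg:SI 60) (const_int 4)), this emits the
   addition and returns a pseudo register holding the sum. */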
5740 force_operand (value, target)
5744 /* Use subtarget as the target for operand 0 of a binary operation. */
5745 rtx subtarget = get_subtarget (target);
5746 enum rtx_code code = GET_CODE (value);
5748 /* Check for a PIC address load. */
5749 if ((code == PLUS || code == MINUS)
5750 && XEXP (value, 0) == pic_offset_table_rtx
5751 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5752 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5753 || GET_CODE (XEXP (value, 1)) == CONST))
5756 subtarget = gen_reg_rtx (GET_MODE (value));
5757 emit_move_insn (subtarget, value);
5761 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5764 target = gen_reg_rtx (GET_MODE (value));
5765 convert_move (target, force_operand (XEXP (value, 0), NULL),
5766 code == ZERO_EXTEND);
5770 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5772 op2 = XEXP (value, 1);
5773 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5775 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5778 op2 = negate_rtx (GET_MODE (value), op2);
5781 /* Check for an addition with OP2 a constant integer and our first
5782 operand a PLUS of a virtual register and something else. In that
5783 case, we want to emit the sum of the virtual register and the
5784 constant first and then add the other value. This allows virtual
5785 register instantiation to simply modify the constant rather than
5786 creating another one around this addition. */
5787 if (code == PLUS && GET_CODE (op2) == CONST_INT
5788 && GET_CODE (XEXP (value, 0)) == PLUS
5789 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5790 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5791 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5793 rtx temp = expand_simple_binop (GET_MODE (value), code,
5794 XEXP (XEXP (value, 0), 0), op2,
5795 subtarget, 0, OPTAB_LIB_WIDEN);
5796 return expand_simple_binop (GET_MODE (value), code, temp,
5797 force_operand (XEXP (XEXP (value,
5799 target, 0, OPTAB_LIB_WIDEN);
5802 op1 = force_operand (XEXP (value, 0), subtarget);
5803 op2 = force_operand (op2, NULL_RTX);
5807 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5809 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5810 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5811 target, 1, OPTAB_LIB_WIDEN);
5813 return expand_divmod (0,
5814 FLOAT_MODE_P (GET_MODE (value))
5815 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5816 GET_MODE (value), op1, op2, target, 0);
5819 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5823 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5827 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5831 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5832 target, 0, OPTAB_LIB_WIDEN);
5835 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5836 target, 1, OPTAB_LIB_WIDEN);
5839 if (GET_RTX_CLASS (code) == '1')
5841 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5842 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5845 #ifdef INSN_SCHEDULING
5846 /* On machines that have insn scheduling, we want all memory references to be
5847 explicit, so we need to deal with such paradoxical SUBREGs. */
5848 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5849 && (GET_MODE_SIZE (GET_MODE (value))
5850 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5852 = simplify_gen_subreg (GET_MODE (value),
5853 force_reg (GET_MODE (SUBREG_REG (value)),
5854 force_operand (SUBREG_REG (value),
5856 GET_MODE (SUBREG_REG (value)),
5857 SUBREG_BYTE (value));
5863 /* Subroutine of expand_expr: return nonzero iff there is no way that
5864 EXP can reference X, which is being modified. TOP_P is nonzero if this
5865 call is going to be used to determine whether we need a temporary
5866 for EXP, as opposed to a recursive call to this function.
5868 It is always safe for this routine to return zero since it merely
5869 searches for optimization opportunities. */
5872 safe_from_p (x, exp, top_p)
5879 static tree save_expr_list;
5882 /* If EXP has varying size, we MUST use a target since we currently
5883 have no way of allocating temporaries of variable size
5884 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5885 So we assume here that something at a higher level has prevented a
5886 clash. This is somewhat bogus, but the best we can do. Only
5887 do this when X is BLKmode and when we are at the top level. */
5888 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5890 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5891 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5892 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5894 && GET_MODE (x) == BLKmode)
5895 /* If X is in the outgoing argument area, it is always safe. */
5896 || (GET_CODE (x) == MEM
5897 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5898 || (GET_CODE (XEXP (x, 0)) == PLUS
5899 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5902 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5903 find the underlying pseudo. */
5904 if (GET_CODE (x) == SUBREG)
5907 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5911 /* A SAVE_EXPR might appear many times in the expression passed to the
5912 top-level safe_from_p call, and if it has a complex subexpression,
5913 examining it multiple times could result in a combinatorial explosion.
5914 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5915 with optimization took about 28 minutes to compile -- even though it was
5916 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5917 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5918 we have processed. Note that the only test of top_p was above. */
5927 rtn = safe_from_p (x, exp, 0);
5929 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5930 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5935 /* Now look at our tree code and possibly recurse. */
5936 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5939 exp_rtl = DECL_RTL_IF_SET (exp);
5946 if (TREE_CODE (exp) == TREE_LIST)
5947 return ((TREE_VALUE (exp) == 0
5948 || safe_from_p (x, TREE_VALUE (exp), 0))
5949 && (TREE_CHAIN (exp) == 0
5950 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5951 else if (TREE_CODE (exp) == ERROR_MARK)
5952 return 1; /* An already-visited SAVE_EXPR? */
5957 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5961 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5962 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5966 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5967 the expression. If it is set, we conflict iff we are that rtx or
5968 both are in memory. Otherwise, we check all operands of the
5969 expression recursively. */
5971 switch (TREE_CODE (exp))
5974 /* If the operand is static or we are static, we can't conflict.
5975 Likewise if we don't conflict with the operand at all. */
5976 if (staticp (TREE_OPERAND (exp, 0))
5977 || TREE_STATIC (exp)
5978 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5981 /* Otherwise, the only way this can conflict is if we are taking
5982 the address of a DECL whose address is part of X, which is
5983 very rare. */
5984 exp = TREE_OPERAND (exp, 0);
5987 if (!DECL_RTL_SET_P (exp)
5988 || GET_CODE (DECL_RTL (exp)) != MEM)
5991 exp_rtl = XEXP (DECL_RTL (exp), 0);
5996 if (GET_CODE (x) == MEM
5997 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5998 get_alias_set (exp)))
6003 /* Assume that the call will clobber all hard registers and
6004 all of memory. */
6005 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6006 || GET_CODE (x) == MEM)
6011 /* If a sequence exists, we would have to scan every instruction
6012 in the sequence to see if it was safe. This is probably not
6013 worthwhile. */
6014 if (RTL_EXPR_SEQUENCE (exp))
6017 exp_rtl = RTL_EXPR_RTL (exp);
6020 case WITH_CLEANUP_EXPR:
6021 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6024 case CLEANUP_POINT_EXPR:
6025 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6028 exp_rtl = SAVE_EXPR_RTL (exp);
6032 /* If we've already scanned this, don't do it again. Otherwise,
6033 show we've scanned it and record for clearing the flag if we're
6034 going on. */
6035 if (TREE_PRIVATE (exp))
6038 TREE_PRIVATE (exp) = 1;
6039 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6041 TREE_PRIVATE (exp) = 0;
6045 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6049 /* The only operand we look at is operand 1. The rest aren't
6050 part of the expression. */
6051 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6053 case METHOD_CALL_EXPR:
6054 /* This takes an rtx argument, but shouldn't appear here. */
6061 /* If we have an rtx, we do not need to scan our operands. */
6065 nops = first_rtl_op (TREE_CODE (exp));
6066 for (i = 0; i < nops; i++)
6067 if (TREE_OPERAND (exp, i) != 0
6068 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6071 /* If this is a language-specific tree code, it may require
6072 special handling. */
6073 if ((unsigned int) TREE_CODE (exp)
6074 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6075 && !(*lang_hooks.safe_from_p) (x, exp))
6079 /* If we have an rtl, find any enclosed object. Then see if we conflict
6080 with it. */
6083 if (GET_CODE (exp_rtl) == SUBREG)
6085 exp_rtl = SUBREG_REG (exp_rtl);
6086 if (GET_CODE (exp_rtl) == REG
6087 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6091 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6092 are memory and they conflict. */
6093 return ! (rtx_equal_p (x, exp_rtl)
6094 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6095 && true_dependence (exp_rtl, VOIDmode, x,
6096 rtx_addr_varies_p)));
6099 /* If we reach here, it is safe. */
6103 /* Subroutine of expand_expr: return rtx if EXP is a
6104 variable or parameter; else return 0. */
6111 switch (TREE_CODE (exp))
6115 return DECL_RTL (exp);
6121 #ifdef MAX_INTEGER_COMPUTATION_MODE
6124 check_max_integer_computation_mode (exp)
6127 enum tree_code code;
6128 enum machine_mode mode;
6130 /* Strip any NOPs that don't change the mode. */
6132 code = TREE_CODE (exp);
6134 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6135 if (code == NOP_EXPR
6136 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6139 /* First check the type of the overall operation. We need only look at
6140 unary, binary and relational operations. */
6141 if (TREE_CODE_CLASS (code) == '1'
6142 || TREE_CODE_CLASS (code) == '2'
6143 || TREE_CODE_CLASS (code) == '<')
6145 mode = TYPE_MODE (TREE_TYPE (exp));
6146 if (GET_MODE_CLASS (mode) == MODE_INT
6147 && mode > MAX_INTEGER_COMPUTATION_MODE)
6148 internal_error ("unsupported wide integer operation");
6151 /* Check operand of a unary op. */
6152 if (TREE_CODE_CLASS (code) == '1')
6154 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6155 if (GET_MODE_CLASS (mode) == MODE_INT
6156 && mode > MAX_INTEGER_COMPUTATION_MODE)
6157 internal_error ("unsupported wide integer operation");
6160 /* Check operands of a binary/comparison op. */
6161 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6163 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6164 if (GET_MODE_CLASS (mode) == MODE_INT
6165 && mode > MAX_INTEGER_COMPUTATION_MODE)
6166 internal_error ("unsupported wide integer operation");
6168 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6169 if (GET_MODE_CLASS (mode) == MODE_INT
6170 && mode > MAX_INTEGER_COMPUTATION_MODE)
6171 internal_error ("unsupported wide integer operation");
6176 /* Return the highest power of two that EXP is known to be a multiple of.
6177 This is used in updating alignment of MEMs in array references. */
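/* For instance, for an offset of the form (i * 8) + 4 this returns
   MIN (8, 4) == 4: I contributes a factor of 1, the multiplication a
   factor of 8, and the addition keeps the smaller factor of its
   operands. */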
6179 static HOST_WIDE_INT
6180 highest_pow2_factor (exp)
6183 HOST_WIDE_INT c0, c1;
6185 switch (TREE_CODE (exp))
6188 /* We can find the lowest bit that's a one. If the low
6189 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6190 We need to handle this case since we can find it in a COND_EXPR,
6191 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6192 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6193 later ICE. */
6194 if (TREE_CONSTANT_OVERFLOW (exp))
6195 return BIGGEST_ALIGNMENT;
6198 /* Note: tree_low_cst is intentionally not used here,
6199 we don't care about the upper bits. */
6200 c0 = TREE_INT_CST_LOW (exp);
6202 return c0 ? c0 : BIGGEST_ALIGNMENT;
6206 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6207 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6208 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6209 return MIN (c0, c1);
6212 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6213 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6216 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6218 if (integer_pow2p (TREE_OPERAND (exp, 1))
6219 && host_integerp (TREE_OPERAND (exp, 1), 1))
6221 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6222 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6223 return MAX (1, c0 / c1);
6227 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6228 case SAVE_EXPR: case WITH_RECORD_EXPR:
6229 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6232 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6235 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6236 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6237 return MIN (c0, c1);
6246 /* Similar, except that it is known that the expression must be a multiple
6247 of the alignment of TYPE. */
6249 static HOST_WIDE_INT
6250 highest_pow2_factor_for_type (type, exp)
6254 HOST_WIDE_INT type_align, factor;
6256 factor = highest_pow2_factor (exp);
6257 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6258 return MAX (factor, type_align);
6261 /* Return an object on the placeholder list that matches EXP, a
6262 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6263 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6264 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6265 is a location which initially points to a starting location in the
6266 placeholder list (zero means start of the list) and where a pointer into
6267 the placeholder list at which the object is found is placed. */
6270 find_placeholder (exp, plist)
6274 tree type = TREE_TYPE (exp);
6275 tree placeholder_expr;
6277 for (placeholder_expr
6278 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6279 placeholder_expr != 0;
6280 placeholder_expr = TREE_CHAIN (placeholder_expr))
6282 tree need_type = TYPE_MAIN_VARIANT (type);
6285 /* Find the outermost reference that is of the type we want. If none,
6286 see if any object has a type that is a pointer to the type we
6287 want. */
6288 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6289 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6290 || TREE_CODE (elt) == COND_EXPR)
6291 ? TREE_OPERAND (elt, 1)
6292 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6293 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6294 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6295 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6296 ? TREE_OPERAND (elt, 0) : 0))
6297 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6300 *plist = placeholder_expr;
6304 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6306 = ((TREE_CODE (elt) == COMPOUND_EXPR
6307 || TREE_CODE (elt) == COND_EXPR)
6308 ? TREE_OPERAND (elt, 1)
6309 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6310 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6311 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6312 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6313 ? TREE_OPERAND (elt, 0) : 0))
6314 if (POINTER_TYPE_P (TREE_TYPE (elt))
6315 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6319 *plist = placeholder_expr;
6320 return build1 (INDIRECT_REF, need_type, elt);
6327 /* expand_expr: generate code for computing expression EXP.
6328 An rtx for the computed value is returned. The value is never null.
6329 In the case of a void EXP, const0_rtx is returned.
6331 The value may be stored in TARGET if TARGET is nonzero.
6332 TARGET is just a suggestion; callers must assume that
6333 the rtx returned may not be the same as TARGET.
6335 If TARGET is CONST0_RTX, it means that the value will be ignored.
6337 If TMODE is not VOIDmode, it suggests generating the
6338 result in mode TMODE. But this is done only when convenient.
6339 Otherwise, TMODE is ignored and the value generated in its natural mode.
6340 TMODE is just a suggestion; callers must assume that
6341 the rtx returned may not have mode TMODE.
6343 Note that TARGET may have neither TMODE nor MODE. In that case, it
6344 probably will not be used.
6346 If MODIFIER is EXPAND_SUM then when EXP is an addition
6347 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6348 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6349 products as above, or REG or MEM, or constant.
6350 Ordinarily in such cases we would output mul or add instructions
6351 and then return a pseudo reg containing the sum.
6353 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6354 it also marks a label as absolutely required (it can't be dead).
6355 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6356 This is used for outputting expressions used in initializers.
6358 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6359 with a constant address even if that address is not normally legitimate.
6360 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
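/* As a sketch of what EXPAND_SUM permits (hypothetical RTL; exact modes
   and operand order depend on the target): expanding BASE + I*4 may
   come back as
     (plus (mult (reg I) (const_int 4)) (reg BASE))
   which the caller can fold into an addressing mode, instead of a
   pseudo register holding the already-computed sum. */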
6363 expand_expr (exp, target, tmode, modifier)
6366 enum machine_mode tmode;
6367 enum expand_modifier modifier;
6370 tree type = TREE_TYPE (exp);
6371 int unsignedp = TREE_UNSIGNED (type);
6372 enum machine_mode mode;
6373 enum tree_code code = TREE_CODE (exp);
6375 rtx subtarget, original_target;
6379 /* Handle ERROR_MARK before anybody tries to access its type. */
6380 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6382 op0 = CONST0_RTX (tmode);
6388 mode = TYPE_MODE (type);
6389 /* Use subtarget as the target for operand 0 of a binary operation. */
6390 subtarget = get_subtarget (target);
6391 original_target = target;
6392 ignore = (target == const0_rtx
6393 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6394 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6395 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6396 && TREE_CODE (type) == VOID_TYPE));
6398 /* If we are going to ignore this result, we need only do something
6399 if there is a side-effect somewhere in the expression. If there
6400 is, short-circuit the most common cases here. Note that we must
6401 not call expand_expr with anything but const0_rtx in case this
6402 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
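/* For example (illustrative): when expanding "(void) (x + f ());" only
   the call to f needs to be expanded, for its side effects; the addition
   itself can be dropped, as the cases below arrange. */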
6406 if (! TREE_SIDE_EFFECTS (exp))
6409 /* Ensure we reference a volatile object even if value is ignored, but
6410 don't do this if all we are doing is taking its address. */
6411 if (TREE_THIS_VOLATILE (exp)
6412 && TREE_CODE (exp) != FUNCTION_DECL
6413 && mode != VOIDmode && mode != BLKmode
6414 && modifier != EXPAND_CONST_ADDRESS)
6416 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6417 if (GET_CODE (temp) == MEM)
6418 temp = copy_to_reg (temp);
6422 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6423 || code == INDIRECT_REF || code == BUFFER_REF)
6424 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6427 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6428 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6430 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6431 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6434 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6435 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6436 /* If the second operand has no side effects, just evaluate the first. */
6438 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6440 else if (code == BIT_FIELD_REF)
6442 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6443 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6444 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6451 #ifdef MAX_INTEGER_COMPUTATION_MODE
6452 /* Only check stuff here if the mode we want is different from the mode
6453 of the expression; if it's the same, check_max_integer_computation_mode
6454 will handle it. Do we really need to check this stuff at all? */
6457 && GET_MODE (target) != mode
6458 && TREE_CODE (exp) != INTEGER_CST
6459 && TREE_CODE (exp) != PARM_DECL
6460 && TREE_CODE (exp) != ARRAY_REF
6461 && TREE_CODE (exp) != ARRAY_RANGE_REF
6462 && TREE_CODE (exp) != COMPONENT_REF
6463 && TREE_CODE (exp) != BIT_FIELD_REF
6464 && TREE_CODE (exp) != INDIRECT_REF
6465 && TREE_CODE (exp) != CALL_EXPR
6466 && TREE_CODE (exp) != VAR_DECL
6467 && TREE_CODE (exp) != RTL_EXPR)
6469 enum machine_mode mode = GET_MODE (target);
6471 if (GET_MODE_CLASS (mode) == MODE_INT
6472 && mode > MAX_INTEGER_COMPUTATION_MODE)
6473 internal_error ("unsupported wide integer operation");
6477 && TREE_CODE (exp) != INTEGER_CST
6478 && TREE_CODE (exp) != PARM_DECL
6479 && TREE_CODE (exp) != ARRAY_REF
6480 && TREE_CODE (exp) != ARRAY_RANGE_REF
6481 && TREE_CODE (exp) != COMPONENT_REF
6482 && TREE_CODE (exp) != BIT_FIELD_REF
6483 && TREE_CODE (exp) != INDIRECT_REF
6484 && TREE_CODE (exp) != VAR_DECL
6485 && TREE_CODE (exp) != CALL_EXPR
6486 && TREE_CODE (exp) != RTL_EXPR
6487 && GET_MODE_CLASS (tmode) == MODE_INT
6488 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6489 internal_error ("unsupported wide integer operation");
6491 check_max_integer_computation_mode (exp);
6494 /* If we will do cse, generate all results into pseudo registers
6495 since 1) that allows cse to find more things
6496 and 2) otherwise cse could produce an insn the machine
6497 cannot support. An exception is a CONSTRUCTOR into a multi-word
6498 MEM: storing directly into the MEM is much more likely to be efficient. */
6500 if (! cse_not_expected && mode != BLKmode && target
6501 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6502 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6509 tree function = decl_function_context (exp);
6510 /* Handle using a label in a containing function. */
6511 if (function != current_function_decl
6512 && function != inline_function_decl && function != 0)
6514 struct function *p = find_function_data (function);
6515 p->expr->x_forced_labels
6516 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6517 p->expr->x_forced_labels);
6521 if (modifier == EXPAND_INITIALIZER)
6522 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6527 temp = gen_rtx_MEM (FUNCTION_MODE,
6528 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6529 if (function != current_function_decl
6530 && function != inline_function_decl && function != 0)
6531 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6536 if (!DECL_RTL_SET_P (exp))
6538 error_with_decl (exp, "prior parameter's size depends on `%s'");
6539 return CONST0_RTX (mode);
6542 /* ... fall through ... */
6545 /* If a static var's type was incomplete when the decl was written,
6546 but the type is complete now, lay out the decl now. */
6547 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6548 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6550 rtx value = DECL_RTL_IF_SET (exp);
6552 layout_decl (exp, 0);
6554 /* If the RTL was already set, update its mode and memory attributes. */
6558 PUT_MODE (value, DECL_MODE (exp));
6559 SET_DECL_RTL (exp, 0);
6560 set_mem_attributes (value, exp, 1);
6561 SET_DECL_RTL (exp, value);
6565 /* ... fall through ... */
6569 if (DECL_RTL (exp) == 0)
6572 /* Ensure variable marked as used even if it doesn't go through
6573 a parser. If it hasn't been used yet, write out an external definition. */
6575 if (! TREE_USED (exp))
6577 assemble_external (exp);
6578 TREE_USED (exp) = 1;
6581 /* Show we haven't gotten RTL for this yet. */
6584 /* Handle variables inherited from containing functions. */
6585 context = decl_function_context (exp);
6587 /* We treat inline_function_decl as an alias for the current function
6588 because that is the inline function whose vars, types, etc.
6589 are being merged into the current function.
6590 See expand_inline_function. */
6592 if (context != 0 && context != current_function_decl
6593 && context != inline_function_decl
6594 /* If var is static, we don't need a static chain to access it. */
6595 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6596 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6600 /* Mark as non-local and addressable. */
6601 DECL_NONLOCAL (exp) = 1;
6602 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6604 (*lang_hooks.mark_addressable) (exp);
6605 if (GET_CODE (DECL_RTL (exp)) != MEM)
6607 addr = XEXP (DECL_RTL (exp), 0);
6608 if (GET_CODE (addr) == MEM)
6610 = replace_equiv_address (addr,
6611 fix_lexical_addr (XEXP (addr, 0), exp));
6613 addr = fix_lexical_addr (addr, exp);
6615 temp = replace_equiv_address (DECL_RTL (exp), addr);
6618 /* This is the case of an array whose size is to be determined
6619 from its initializer, while the initializer is still being parsed. See expand_decl. */
6622 else if (GET_CODE (DECL_RTL (exp)) == MEM
6623 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6624 temp = validize_mem (DECL_RTL (exp));
6626 /* If DECL_RTL is memory, we are in the normal case and either
6627 the address is not valid or it is not a register and -fforce-addr
6628 is specified, get the address into a register. */
6630 else if (GET_CODE (DECL_RTL (exp)) == MEM
6631 && modifier != EXPAND_CONST_ADDRESS
6632 && modifier != EXPAND_SUM
6633 && modifier != EXPAND_INITIALIZER
6634 && (! memory_address_p (DECL_MODE (exp),
6635 XEXP (DECL_RTL (exp), 0))
6637 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6638 temp = replace_equiv_address (DECL_RTL (exp),
6639 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6641 /* If we got something, return it. But first, set the alignment
6642 if the address is a register. */
6645 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6646 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6651 /* If the mode of DECL_RTL does not match that of the decl, it
6652 must be a promoted value. We return a SUBREG of the wanted mode,
6653 but mark it so that we know that it was already extended. */
6655 if (GET_CODE (DECL_RTL (exp)) == REG
6656 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6658 /* Get the signedness used for this variable. Ensure we get the
6659 same mode we got when the variable was declared. */
6660 if (GET_MODE (DECL_RTL (exp))
6661 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6662 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6665 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6666 SUBREG_PROMOTED_VAR_P (temp) = 1;
6667 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6671 return DECL_RTL (exp);
6674 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6675 TREE_INT_CST_HIGH (exp), mode);
6677 /* ??? If overflow is set, fold will have done an incomplete job,
6678 which can result in (plus xx (const_int 0)), which can get
6679 simplified by validate_replace_rtx during virtual register
6680 instantiation, which can result in unrecognizable insns.
6681 Avoid this by forcing all overflows into registers. */
6682 if (TREE_CONSTANT_OVERFLOW (exp)
6683 && modifier != EXPAND_INITIALIZER)
6684 temp = force_reg (mode, temp);
6689 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6692 /* If optimized, generate immediate CONST_DOUBLE
6693 which will be turned into memory by reload if necessary.
6695 We used to force a register so that loop.c could see it. But
6696 this does not allow gen_* patterns to perform optimizations with
6697 the constants. It also produces two insns in cases like "x = 1.0;".
6698 On most machines, floating-point constants are not permitted in
6699 many insns, so we'd end up copying it to a register in any case.
6701 Now, we do the copying in expand_binop, if appropriate. */
6702 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6703 TYPE_MODE (TREE_TYPE (exp)));
6707 if (! TREE_CST_RTL (exp))
6708 output_constant_def (exp, 1);
6710 /* TREE_CST_RTL probably contains a constant address.
6711 On RISC machines where a constant address isn't valid,
6712 make some insns to get that address into a register. */
6713 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6714 && modifier != EXPAND_CONST_ADDRESS
6715 && modifier != EXPAND_INITIALIZER
6716 && modifier != EXPAND_SUM
6717 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6719 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6720 return replace_equiv_address (TREE_CST_RTL (exp),
6721 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6722 return TREE_CST_RTL (exp);
6724 case EXPR_WITH_FILE_LOCATION:
6727 const char *saved_input_filename = input_filename;
6728 int saved_lineno = lineno;
6729 input_filename = EXPR_WFL_FILENAME (exp);
6730 lineno = EXPR_WFL_LINENO (exp);
6731 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6732 emit_line_note (input_filename, lineno);
6733 /* Possibly avoid switching back and forth here. */
6734 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6735 input_filename = saved_input_filename;
6736 lineno = saved_lineno;
6741 context = decl_function_context (exp);
6743 /* If this SAVE_EXPR was at global context, assume we are an
6744 initialization function and move it into our context. */
6746 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6748 /* We treat inline_function_decl as an alias for the current function
6749 because that is the inline function whose vars, types, etc.
6750 are being merged into the current function.
6751 See expand_inline_function. */
6752 if (context == current_function_decl || context == inline_function_decl)
6755 /* If this is non-local, handle it. */
6758 /* The following call just exists to abort if the context is
6759 not of a containing function. */
6760 find_function_data (context);
6762 temp = SAVE_EXPR_RTL (exp);
6763 if (temp && GET_CODE (temp) == REG)
6765 put_var_into_stack (exp);
6766 temp = SAVE_EXPR_RTL (exp);
6768 if (temp == 0 || GET_CODE (temp) != MEM)
6771 replace_equiv_address (temp,
6772 fix_lexical_addr (XEXP (temp, 0), exp));
6774 if (SAVE_EXPR_RTL (exp) == 0)
6776 if (mode == VOIDmode)
6779 temp = assign_temp (build_qualified_type (type,
6781 | TYPE_QUAL_CONST)),
6784 SAVE_EXPR_RTL (exp) = temp;
6785 if (!optimize && GET_CODE (temp) == REG)
6786 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6789 /* If the mode of TEMP does not match that of the expression, it
6790 must be a promoted value. We pass store_expr a SUBREG of the
6791 wanted mode but mark it so that we know that it was already
6792 extended. Note that `unsignedp' was modified above in this case. */
6795 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6797 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6798 SUBREG_PROMOTED_VAR_P (temp) = 1;
6799 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6802 if (temp == const0_rtx)
6803 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6805 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6807 TREE_USED (exp) = 1;
6810 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6811 must be a promoted value. We return a SUBREG of the wanted mode,
6812 but mark it so that we know that it was already extended. */
6814 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6815 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6817 /* Compute the signedness and make the proper SUBREG. */
6818 promote_mode (type, mode, &unsignedp, 0);
6819 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6820 SUBREG_PROMOTED_VAR_P (temp) = 1;
6821 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6825 return SAVE_EXPR_RTL (exp);
6830 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6831 TREE_OPERAND (exp, 0)
6832 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6836 case PLACEHOLDER_EXPR:
6838 tree old_list = placeholder_list;
6839 tree placeholder_expr = 0;
6841 exp = find_placeholder (exp, &placeholder_expr);
6845 placeholder_list = TREE_CHAIN (placeholder_expr);
6846 temp = expand_expr (exp, original_target, tmode, modifier);
6847 placeholder_list = old_list;
6851 case WITH_RECORD_EXPR:
6852 /* Put the object on the placeholder list, expand our first operand,
6853 and pop the list. */
6854 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6856 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6858 placeholder_list = TREE_CHAIN (placeholder_list);
6862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6863 expand_goto (TREE_OPERAND (exp, 0));
6865 expand_computed_goto (TREE_OPERAND (exp, 0));
6869 expand_exit_loop_if_false (NULL,
6870 invert_truthvalue (TREE_OPERAND (exp, 0)));
6873 case LABELED_BLOCK_EXPR:
6874 if (LABELED_BLOCK_BODY (exp))
6875 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6876 /* Should perhaps use expand_label, but this is simpler and safer. */
6877 do_pending_stack_adjust ();
6878 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6881 case EXIT_BLOCK_EXPR:
6882 if (EXIT_BLOCK_RETURN (exp))
6883 sorry ("returned value in block_exit_expr");
6884 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6889 expand_start_loop (1);
6890 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6898 tree vars = TREE_OPERAND (exp, 0);
6899 int vars_need_expansion = 0;
6901 /* Need to open a binding contour here because
6902 if there are any cleanups they must be contained here. */
6903 expand_start_bindings (2);
6905 /* Mark the corresponding BLOCK for output in its proper place. */
6906 if (TREE_OPERAND (exp, 2) != 0
6907 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6908 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6910 /* If VARS have not yet been expanded, expand them now. */
6913 if (!DECL_RTL_SET_P (vars))
6915 vars_need_expansion = 1;
6918 expand_decl_init (vars);
6919 vars = TREE_CHAIN (vars);
6922 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6924 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6930 if (RTL_EXPR_SEQUENCE (exp))
6932 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6934 emit_insn (RTL_EXPR_SEQUENCE (exp));
6935 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6937 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6938 free_temps_for_rtl_expr (exp);
6939 return RTL_EXPR_RTL (exp);
6942 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6948 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6949 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6954 /* All elts simple constants => refer to a constant in memory. But
6955 if this is a non-BLKmode mode, let it store a field at a time
6956 since that should make a CONST_INT or CONST_DOUBLE when we
6957 fold. Likewise, if we have a target we can use, it is best to
6958 store directly into the target unless the type is large enough
6959 that memcpy will be used. If we are making an initializer and
6960 all operands are constant, put it in memory as well.
6962 FIXME: Avoid trying to fill vector constructors piece-meal.
6963 Output them with output_constant_def below unless we're sure
6964 they're zeros. This should go away when vector initializers
6965 are treated like VECTOR_CST instead of arrays. */
6967 else if ((TREE_STATIC (exp)
6968 && ((mode == BLKmode
6969 && ! (target != 0 && safe_from_p (target, exp, 1)))
6970 || TREE_ADDRESSABLE (exp)
6971 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6972 && (! MOVE_BY_PIECES_P
6973 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6975 && ((TREE_CODE (type) == VECTOR_TYPE
6976 && !is_zeros_p (exp))
6977 || ! mostly_zeros_p (exp)))))
6978 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6980 rtx constructor = output_constant_def (exp, 1);
6982 if (modifier != EXPAND_CONST_ADDRESS
6983 && modifier != EXPAND_INITIALIZER
6984 && modifier != EXPAND_SUM)
6985 constructor = validize_mem (constructor);
6991 /* Handle calls that pass values in multiple non-contiguous
6992 locations. The Irix 6 ABI has examples of this. */
6993 if (target == 0 || ! safe_from_p (target, exp, 1)
6994 || GET_CODE (target) == PARALLEL)
6996 = assign_temp (build_qualified_type (type,
6998 | (TREE_READONLY (exp)
6999 * TYPE_QUAL_CONST))),
7000 0, TREE_ADDRESSABLE (exp), 1);
7002 store_constructor (exp, target, 0, int_expr_size (exp));
7008 tree exp1 = TREE_OPERAND (exp, 0);
7010 tree string = string_constant (exp1, &index);
7012 /* Try to optimize reads from const strings. */
7014 && TREE_CODE (string) == STRING_CST
7015 && TREE_CODE (index) == INTEGER_CST
7016 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7017 && GET_MODE_CLASS (mode) == MODE_INT
7018 && GET_MODE_SIZE (mode) == 1
7019 && modifier != EXPAND_WRITE)
7020 return gen_int_mode (TREE_STRING_POINTER (string)
7021 [TREE_INT_CST_LOW (index)], mode);
7023 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7024 op0 = memory_address (mode, op0);
7025 temp = gen_rtx_MEM (mode, op0);
7026 set_mem_attributes (temp, exp, 0);
7028 /* If we are writing to this object and its type is a record with
7029 readonly fields, we must mark it as readonly so it will
7030 conflict with readonly references to those fields. */
7031 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7032 RTX_UNCHANGING_P (temp) = 1;
7038 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7042 tree array = TREE_OPERAND (exp, 0);
7043 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7044 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7045 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7048 /* Optimize the special case of a zero lower bound.
7050 We convert the low_bound to sizetype to avoid some problems
7051 with constant folding. (E.g. suppose the lower bound is 1,
7052 and its mode is QI. Without the conversion, (ARRAY
7053 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7054 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7056 if (! integer_zerop (low_bound))
7057 index = size_diffop (index, convert (sizetype, low_bound));
7059 /* Fold an expression like: "foo"[2].
7060 This is not done in fold so it won't happen inside &.
7061 Don't fold if this is for wide characters since it's too
7062 difficult to do correctly and this is a very rare case. */
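/* E.g. "foo"[2] is folded here to the character constant 'o', assuming
   a one-byte integer mode for the element type. */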
7064 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7065 && TREE_CODE (array) == STRING_CST
7066 && TREE_CODE (index) == INTEGER_CST
7067 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7068 && GET_MODE_CLASS (mode) == MODE_INT
7069 && GET_MODE_SIZE (mode) == 1)
7070 return gen_int_mode (TREE_STRING_POINTER (array)
7071 [TREE_INT_CST_LOW (index)], mode);
7073 /* If this is a constant index into a constant array,
7074 just get the value from the array. Handle both cases: when
7075 we have an explicit constructor and when our operand is a variable
7076 that was declared const. */
7078 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7079 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7080 && TREE_CODE (index) == INTEGER_CST
7081 && 0 > compare_tree_int (index,
7082 list_length (CONSTRUCTOR_ELTS
7083 (TREE_OPERAND (exp, 0)))))
7087 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7088 i = TREE_INT_CST_LOW (index);
7089 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7093 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7097 else if (optimize >= 1
7098 && modifier != EXPAND_CONST_ADDRESS
7099 && modifier != EXPAND_INITIALIZER
7100 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7101 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7102 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7104 if (TREE_CODE (index) == INTEGER_CST)
7106 tree init = DECL_INITIAL (array);
7108 if (TREE_CODE (init) == CONSTRUCTOR)
7112 for (elem = CONSTRUCTOR_ELTS (init);
7114 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7115 elem = TREE_CHAIN (elem))
7118 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7119 return expand_expr (fold (TREE_VALUE (elem)), target,
7122 else if (TREE_CODE (init) == STRING_CST
7123 && 0 > compare_tree_int (index,
7124 TREE_STRING_LENGTH (init)))
7126 tree type = TREE_TYPE (TREE_TYPE (init));
7127 enum machine_mode mode = TYPE_MODE (type);
7129 if (GET_MODE_CLASS (mode) == MODE_INT
7130 && GET_MODE_SIZE (mode) == 1)
7131 return gen_int_mode (TREE_STRING_POINTER (init)
7132 [TREE_INT_CST_LOW (index)], mode);
7141 case ARRAY_RANGE_REF:
7142 /* If the operand is a CONSTRUCTOR, we can just extract the
7143 appropriate field if it is present. Don't do this if we have
7144 already written the data since we want to refer to that copy
7145 and varasm.c assumes that's what we'll do. */
7146 if (code == COMPONENT_REF
7147 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7148 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7152 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7153 elt = TREE_CHAIN (elt))
7154 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7155 /* We can normally use the value of the field in the
7156 CONSTRUCTOR. However, if this is a bitfield in
7157 an integral mode that we can fit in a HOST_WIDE_INT,
7158 we must mask only the number of bits in the bitfield,
7159 since this is done implicitly by the constructor. If
7160 the bitfield does not meet either of those conditions,
7161 we can't do this optimization. */
7162 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7163 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7165 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7166 <= HOST_BITS_PER_WIDE_INT))))
7168 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7169 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7171 HOST_WIDE_INT bitsize
7172 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7173 enum machine_mode imode
7174 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7176 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7178 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7179 op0 = expand_and (imode, op0, op1, target);
7184 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7187 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7189 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7199 enum machine_mode mode1;
7200 HOST_WIDE_INT bitsize, bitpos;
7203 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7204 &mode1, &unsignedp, &volatilep);
7207 /* If we got back the original object, something is wrong. Perhaps
7208 we are evaluating an expression too early. In any event, don't
7209 infinitely recurse. */
7213 /* If TEM's type is a union of variable size, pass TARGET to the inner
7214 computation, since it will need a temporary and TARGET is known
7215 to be usable for it. This occurs in unchecked conversion in Ada. */
7219 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7220 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7222 ? target : NULL_RTX),
7224 (modifier == EXPAND_INITIALIZER
7225 || modifier == EXPAND_CONST_ADDRESS)
7226 ? modifier : EXPAND_NORMAL);
7228 /* If this is a constant, put it into a register if it is a
7229 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7230 if (CONSTANT_P (op0))
7232 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7233 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7235 op0 = force_reg (mode, op0);
7237 op0 = validize_mem (force_const_mem (mode, op0));
7242 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7244 /* If this object is in a register, put it into memory.
7245 This case can't occur in C, but can in Ada if we have
7246 unchecked conversion of an expression from a scalar type to
7247 an array or record type. */
7248 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7249 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7251 /* If the operand is a SAVE_EXPR, we can deal with this by
7252 forcing the SAVE_EXPR into memory. */
7253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7255 put_var_into_stack (TREE_OPERAND (exp, 0));
7256 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7261 = build_qualified_type (TREE_TYPE (tem),
7262 (TYPE_QUALS (TREE_TYPE (tem))
7263 | TYPE_QUAL_CONST));
7264 rtx memloc = assign_temp (nt, 1, 1, 1);
7266 emit_move_insn (memloc, op0);
7271 if (GET_CODE (op0) != MEM)
7274 #ifdef POINTERS_EXTEND_UNSIGNED
7275 if (GET_MODE (offset_rtx) != Pmode)
7276 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7278 if (GET_MODE (offset_rtx) != ptr_mode)
7279 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7282 /* A constant address in OP0 can have VOIDmode; we must not try
7283 to call force_reg in that case. Avoid that case. */
7284 if (GET_CODE (op0) == MEM
7285 && GET_MODE (op0) == BLKmode
7286 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7288 && (bitpos % bitsize) == 0
7289 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7290 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7292 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7296 op0 = offset_address (op0, offset_rtx,
7297 highest_pow2_factor (offset));
7300 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7301 record its alignment as BIGGEST_ALIGNMENT. */
7302 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7303 && is_aligning_offset (offset, tem))
7304 set_mem_align (op0, BIGGEST_ALIGNMENT);
7306 /* Don't forget about volatility even if this is a bitfield. */
7307 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7309 if (op0 == orig_op0)
7310 op0 = copy_rtx (op0);
7312 MEM_VOLATILE_P (op0) = 1;
7315 /* The following code doesn't handle CONCAT.
7316 Assume only bitpos == 0 can be used for CONCAT, due to
7317 one-element arrays having the same mode as their element. */
7318 if (GET_CODE (op0) == CONCAT)
7320 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7325 /* In cases where an aligned union has an unaligned object
7326 as a field, we might be extracting a BLKmode value from
7327 an integer-mode (e.g., SImode) object. Handle this case
7328 by doing the extract into an object as wide as the field
7329 (which we know to be the width of a basic mode), then
7330 storing into memory, and changing the mode to BLKmode. */
7331 if (mode1 == VOIDmode
7332 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7333 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7334 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7335 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7336 && modifier != EXPAND_CONST_ADDRESS
7337 && modifier != EXPAND_INITIALIZER)
7338 /* If the field isn't aligned enough to fetch as a memref,
7339 fetch it as a bit field. */
7340 || (mode1 != BLKmode
7341 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7342 && ((TYPE_ALIGN (TREE_TYPE (tem))
7343 < GET_MODE_ALIGNMENT (mode))
7344 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7345 /* If the type and the field are a constant size and the
7346 size of the type isn't the same size as the bitfield,
7347 we must use bitfield operations. */
7349 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7351 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7354 enum machine_mode ext_mode = mode;
7356 if (ext_mode == BLKmode
7357 && ! (target != 0 && GET_CODE (op0) == MEM
7358 && GET_CODE (target) == MEM
7359 && bitpos % BITS_PER_UNIT == 0))
7360 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7362 if (ext_mode == BLKmode)
7364 /* In this case, BITPOS must start at a byte boundary and
7365 TARGET, if specified, must be a MEM. */
7366 if (GET_CODE (op0) != MEM
7367 || (target != 0 && GET_CODE (target) != MEM)
7368 || bitpos % BITS_PER_UNIT != 0)
7371 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7373 target = assign_temp (type, 0, 1, 1);
7375 emit_block_move (target, op0,
7376 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7383 op0 = validize_mem (op0);
7385 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7386 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7388 op0 = extract_bit_field (op0, bitsize, bitpos,
7389 unsignedp, target, ext_mode, ext_mode,
7390 int_size_in_bytes (TREE_TYPE (tem)));
7392 /* If the result is a record type and BITSIZE is narrower than
7393 the mode of OP0, an integral mode, and this is a big endian
7394 machine, we must put the field into the high-order bits. */
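/* Illustration: extracting an 8-bit field into a 32-bit integral OP0 on
   a big-endian machine shifts it left by 32 - 8 = 24 bits, placing the
   field in the high-order byte, as the shift below does. */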
7395 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7396 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7397 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7398 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7399 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7403 if (mode == BLKmode)
7405 rtx new = assign_temp (build_qualified_type
7406 ((*lang_hooks.types.type_for_mode)
7408 TYPE_QUAL_CONST), 0, 1, 1);
7410 emit_move_insn (new, op0);
7411 op0 = copy_rtx (new);
7412 PUT_MODE (op0, BLKmode);
7413 set_mem_attributes (op0, exp, 1);
7419 /* If the result is BLKmode, use that to access the object now as well. */
7421 if (mode == BLKmode)
7424 /* Get a reference to just this component. */
7425 if (modifier == EXPAND_CONST_ADDRESS
7426 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7427 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7429 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7431 if (op0 == orig_op0)
7432 op0 = copy_rtx (op0);
7434 set_mem_attributes (op0, exp, 0);
7435 if (GET_CODE (XEXP (op0, 0)) == REG)
7436 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7438 MEM_VOLATILE_P (op0) |= volatilep;
7439 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7440 || modifier == EXPAND_CONST_ADDRESS
7441 || modifier == EXPAND_INITIALIZER)
7443 else if (target == 0)
7444 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7446 convert_move (target, op0, unsignedp);
7452 rtx insn, before = get_last_insn (), vtbl_ref;
7454 /* Evaluate the interior expression. */
7455 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7458 /* Get or create an instruction off which to hang a note. */
7459 if (REG_P (subtarget))
7462 insn = get_last_insn ();
7465 if (! INSN_P (insn))
7466 insn = prev_nonnote_insn (insn);
7470 target = gen_reg_rtx (GET_MODE (subtarget));
7471 insn = emit_move_insn (target, subtarget);
7474 /* Collect the data for the note. */
7475 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7476 vtbl_ref = plus_constant (vtbl_ref,
7477 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7478 /* Discard the initial CONST that was added. */
7479 vtbl_ref = XEXP (vtbl_ref, 0);
7482 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7487 /* Intended for a reference to a buffer of a file-object in Pascal.
7488 But it's not certain that a special tree code will really be
7489 necessary for these. INDIRECT_REF might work for them. */
7495 /* Pascal set IN expression.
7498 rlo = set_low - (set_low%bits_per_word);
7499 the_word = set [ (index - rlo)/bits_per_word ];
7500 bit_index = index % bits_per_word;
7501 bitmask = 1 << bit_index;
7502 return !!(the_word & bitmask); */
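/* The same test written out in C (a sketch only, assuming 8-bit storage
   units, i.e. BITS_PER_UNIT == 8; not the code emitted below):

   static int in_set (const unsigned char *set, long set_low, long index)
   {
     long rlo = set_low - (set_low % 8);
     return (set[(index - rlo) / 8] >> (index % 8)) & 1;
   }  */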
7504 tree set = TREE_OPERAND (exp, 0);
7505 tree index = TREE_OPERAND (exp, 1);
7506 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7507 tree set_type = TREE_TYPE (set);
7508 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7509 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7510 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7511 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7512 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7513 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7514 rtx setaddr = XEXP (setval, 0);
7515 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7517 rtx diff, quo, rem, addr, bit, result;
7519 /* If domain is empty, answer is no. Likewise if index is constant
7520 and out of bounds. */
7521 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7522 && TREE_CODE (set_low_bound) == INTEGER_CST
7523 && tree_int_cst_lt (set_high_bound, set_low_bound))
7524 || (TREE_CODE (index) == INTEGER_CST
7525 && TREE_CODE (set_low_bound) == INTEGER_CST
7526 && tree_int_cst_lt (index, set_low_bound))
7527 || (TREE_CODE (set_high_bound) == INTEGER_CST
7528 && TREE_CODE (index) == INTEGER_CST
7529 && tree_int_cst_lt (set_high_bound, index))))
7533 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7535 /* If we get here, we have to generate the code for both cases
7536 (in range and out of range). */
7538 op0 = gen_label_rtx ();
7539 op1 = gen_label_rtx ();
7541 if (! (GET_CODE (index_val) == CONST_INT
7542 && GET_CODE (lo_r) == CONST_INT))
7543 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7544 GET_MODE (index_val), iunsignedp, op1);
7546 if (! (GET_CODE (index_val) == CONST_INT
7547 && GET_CODE (hi_r) == CONST_INT))
7548 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7549 GET_MODE (index_val), iunsignedp, op1);
7551 /* Calculate the element number of bit zero in the first word of the set. */
7553 if (GET_CODE (lo_r) == CONST_INT)
7554 rlow = GEN_INT (INTVAL (lo_r)
7555 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7557 rlow = expand_binop (index_mode, and_optab, lo_r,
7558 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7559 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7561 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7562 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7564 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7565 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7566 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7567 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7569 addr = memory_address (byte_mode,
7570 expand_binop (index_mode, add_optab, diff,
7571 setaddr, NULL_RTX, iunsignedp,
7574 /* Extract the bit we want to examine. */
7575 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7576 gen_rtx_MEM (byte_mode, addr),
7577 make_tree (TREE_TYPE (index), rem),
7579 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7580 GET_MODE (target) == byte_mode ? target : 0,
7581 1, OPTAB_LIB_WIDEN);
7583 if (result != target)
7584 convert_move (target, result, 1);
7586 /* Output the code to handle the out-of-range case. */
7589 emit_move_insn (target, const0_rtx);
7594 case WITH_CLEANUP_EXPR:
7595 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7597 WITH_CLEANUP_EXPR_RTL (exp)
7598 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7599 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7600 CLEANUP_EH_ONLY (exp));
7602 /* That's it for this cleanup. */
7603 TREE_OPERAND (exp, 1) = 0;
7605 return WITH_CLEANUP_EXPR_RTL (exp);
7607 case CLEANUP_POINT_EXPR:
7609 /* Start a new binding layer that will keep track of all cleanup
7610 actions to be performed. */
7611 expand_start_bindings (2);
7613 target_temp_slot_level = temp_slot_level;
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7616 /* If we're going to use this value, load it up now. */
7618 op0 = force_not_mem (op0);
7619 preserve_temp_slots (op0);
7620 expand_end_bindings (NULL_TREE, 0, 0);
7625 /* Check for a built-in function. */
7626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7627 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7629 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7631 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7632 == BUILT_IN_FRONTEND)
7633 return (*lang_hooks.expand_expr)
7634 (exp, original_target, tmode, modifier);
7636 return expand_builtin (exp, target, subtarget, tmode, ignore);
7639 return expand_call (exp, target, ignore);
7641 case NON_LVALUE_EXPR:
7644 case REFERENCE_EXPR:
7645 if (TREE_OPERAND (exp, 0) == error_mark_node)
7648 if (TREE_CODE (type) == UNION_TYPE)
7650 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7652 /* If both input and output are BLKmode, this conversion isn't doing
7653 anything except possibly changing memory attributes. */
7654 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7656 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7659 result = copy_rtx (result);
7660 set_mem_attributes (result, exp, 0);
7665 target = assign_temp (type, 0, 1, 1);
7667 if (GET_CODE (target) == MEM)
7668 /* Store data into beginning of memory target. */
7669 store_expr (TREE_OPERAND (exp, 0),
7670 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7672 else if (GET_CODE (target) == REG)
7673 /* Store this field into a union of the proper type. */
7674 store_field (target,
7675 MIN ((int_size_in_bytes (TREE_TYPE
7676 (TREE_OPERAND (exp, 0)))
7678 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7679 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7680 VOIDmode, 0, type, 0);
7684 /* Return the entire union. */
7688 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7690 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7693 /* If the signedness of the conversion differs and OP0 is
7694 a promoted SUBREG, clear that indication since we now
7695 have to do the proper extension. */
7696 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7697 && GET_CODE (op0) == SUBREG)
7698 SUBREG_PROMOTED_VAR_P (op0) = 0;
7703 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7704 if (GET_MODE (op0) == mode)
7707 /* If OP0 is a constant, just convert it into the proper mode. */
7708 if (CONSTANT_P (op0))
7710 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7711 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7713 if (modifier == EXPAND_INITIALIZER)
7714 return simplify_gen_subreg (mode, op0, inner_mode,
7715 subreg_lowpart_offset (mode,
7718 return convert_modes (mode, inner_mode, op0,
7719 TREE_UNSIGNED (inner_type));
7722 if (modifier == EXPAND_INITIALIZER)
7723 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7727 convert_to_mode (mode, op0,
7728 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7730 convert_move (target, op0,
7731 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7734 case VIEW_CONVERT_EXPR:
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7737 /* If the input and output modes are both the same, we are done.
7738 Otherwise, if neither mode is BLKmode and both are within a word, we
7739 can use gen_lowpart. If neither is true, make sure the operand is
7740 in memory and convert the MEM to the new mode. */
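/* E.g. (illustrative) a VIEW_CONVERT_EXPR from a 32-bit float to a
   32-bit integer reinterprets the same bits; since neither mode is
   BLKmode and both fit in a word, gen_lowpart suffices and no
   conversion insns are emitted. */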
7741 if (TYPE_MODE (type) == GET_MODE (op0))
7743 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7744 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7745 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7746 op0 = gen_lowpart (TYPE_MODE (type), op0);
7747 else if (GET_CODE (op0) != MEM)
7749 /* If the operand is not a MEM, force it into memory. Since we
7750 are going to be changing the mode of the MEM, don't call
7751 force_const_mem for constants because we don't allow pool
7752 constants to change mode. */
7753 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7755 if (TREE_ADDRESSABLE (exp))
7758 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7760 = assign_stack_temp_for_type
7761 (TYPE_MODE (inner_type),
7762 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7764 emit_move_insn (target, op0);
7768 /* At this point, OP0 is in the correct mode. If the output type is such
7769 that the operand is known to be aligned, indicate that it is.
7770 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7772 if (GET_CODE (op0) == MEM)
7774 op0 = copy_rtx (op0);
7776 if (TYPE_ALIGN_OK (type))
7777 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7778 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7779 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7781 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7782 HOST_WIDE_INT temp_size
7783 = MAX (int_size_in_bytes (inner_type),
7784 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7785 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7786 temp_size, 0, type);
7787 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7789 if (TREE_ADDRESSABLE (exp))
7792 if (GET_MODE (op0) == BLKmode)
7793 emit_block_move (new_with_op0_mode, op0,
7794 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7797 emit_move_insn (new_with_op0_mode, op0);
7802 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7808 this_optab = ! unsignedp && flag_trapv
7809 && (GET_MODE_CLASS (mode) == MODE_INT)
7810 ? addv_optab : add_optab;
7812 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7813 something else, make sure we add the register to the constant and
7814 then to the other thing. This case can occur during strength
7815 reduction and doing it this way will produce better code if the
7816 frame pointer or argument pointer is eliminated.
7818 fold-const.c will ensure that the constant is always in the inner
7819 PLUS_EXPR, so the only case we need to do anything about is if
7820 sp, ap, or fp is our second argument, in which case we must swap
7821 the innermost first argument and our second argument. */
7823 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7824 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7825 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7826 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7827 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7828 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7830 tree t = TREE_OPERAND (exp, 1);
7832 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7833 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7836 /* If the result is to be ptr_mode and we are adding an integer to
7837 something, we might be forming a constant. So try to use
7838 plus_constant. If it produces a sum and we can't accept it,
7839 use force_operand. This allows P = &ARR[const] to generate
7840 efficient code on machines where a SYMBOL_REF is not a valid address.
7843 If this is an EXPAND_SUM call, always return the sum. */
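/* E.g. with 4-byte ints (illustrative), &ARR[5] can fold via
   plus_constant to
     (const (plus (symbol_ref "ARR") (const_int 20)))
   instead of emitting an add insn. */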
7844 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7845 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7848 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7849 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7853 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7855 /* Use immed_double_const to ensure that the constant is
7856 truncated according to the mode of OP1, then sign extended
7857 to a HOST_WIDE_INT. Using the constant directly can result
7858 in non-canonical RTL in a 64x32 cross compile. */
7860 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7862 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7863 op1 = plus_constant (op1, INTVAL (constant_part));
7864 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7865 op1 = force_operand (op1, target);
7869 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7871 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7876 (modifier == EXPAND_INITIALIZER
7877 ? EXPAND_INITIALIZER : EXPAND_SUM));
7878 if (! CONSTANT_P (op0))
7880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7881 VOIDmode, modifier);
7882 /* Don't go to both_summands if modifier
7883 says it's not right to return a PLUS. */
7884 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7888 /* Use immed_double_const to ensure that the constant is
7889 truncated according to the mode of OP1, then sign extended
7890 to a HOST_WIDE_INT. Using the constant directly can result
7891 in non-canonical RTL in a 64x32 cross compile. */
7893 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7895 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7896 op0 = plus_constant (op0, INTVAL (constant_part));
7897 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7898 op0 = force_operand (op0, target);
7903 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7906 /* No sense saving up arithmetic to be done
7907 if it's all in the wrong mode to form part of an address.
7908 And force_operand won't know whether to sign-extend or zero-extend. */
7910 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7911 || mode != ptr_mode)
7913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7915 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7921 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7922 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7924 /* We come here from MINUS_EXPR when the second operand is a constant. */
7927 /* Make sure any term that's a sum with a constant comes last. */
7928 if (GET_CODE (op0) == PLUS
7929 && CONSTANT_P (XEXP (op0, 1)))
7935 /* If adding to a sum including a constant,
7936 associate it to put the constant outside. */
7937 if (GET_CODE (op1) == PLUS
7938 && CONSTANT_P (XEXP (op1, 1)))
7940 rtx constant_term = const0_rtx;
7942 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7945 /* Ensure that MULT comes first if there is one. */
7946 else if (GET_CODE (op0) == MULT)
7947 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7949 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7951 /* Let's also eliminate constants from op0 if possible. */
7952 op0 = eliminate_constant_term (op0, &constant_term);
7954 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7955 their sum should be a constant. Form it into OP1, since the
7956 result we want will then be OP0 + OP1. */
7958 temp = simplify_binary_operation (PLUS, mode, constant_term,
7963 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7966 /* Put a constant term last and put a multiplication first. */
7967 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7968 temp = op1, op1 = op0, op0 = temp;
7970 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7971 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7974 /* For initializers, we are allowed to return a MINUS of two
7975 symbolic constants. Here we handle all cases when both operands are constant. */
7977 /* Handle difference of two symbolic constants,
7978 for the sake of an initializer. */
7979 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7980 && really_constant_p (TREE_OPERAND (exp, 0))
7981 && really_constant_p (TREE_OPERAND (exp, 1)))
7983 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7985 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7988 /* If the last operand is a CONST_INT, use plus_constant of
7989 the negated constant. Else make the MINUS. */
7990 if (GET_CODE (op1) == CONST_INT)
7991 return plus_constant (op0, - INTVAL (op1));
7993 return gen_rtx_MINUS (mode, op0, op1);
7996 this_optab = ! unsignedp && flag_trapv
7997 && (GET_MODE_CLASS(mode) == MODE_INT)
7998 ? subv_optab : sub_optab;
8000 /* No sense saving up arithmetic to be done
8001 if it's all in the wrong mode to form part of an address.
8002 And force_operand won't know whether to sign-extend or zero-extend. */
8004 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8005 || mode != ptr_mode)
8008 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8012 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8014 /* Convert A - const to A + (-const). */
8015 if (GET_CODE (op1) == CONST_INT)
8017 op1 = negate_rtx (mode, op1);
8024 /* If first operand is constant, swap them.
8025 Thus the following special case checks need only
8026 check the second operand. */
8027 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8029 tree t1 = TREE_OPERAND (exp, 0);
8030 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8031 TREE_OPERAND (exp, 1) = t1;
8034 /* Attempt to return something suitable for generating an
8035 indexed address, for machines that support that. */
8037 if (modifier == EXPAND_SUM && mode == ptr_mode
8038 && host_integerp (TREE_OPERAND (exp, 1), 0))
8040 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8043 /* If we knew for certain that this is arithmetic for an array
8044 reference, and we knew the bounds of the array, then we could
8045 apply the distributive law across (PLUS X C) for constant C.
8046 Without such knowledge, we risk overflowing the computation
8047 when both X and C are large, but X+C isn't. */
8048 /* ??? Could perhaps special-case EXP being unsigned and C being
8049 positive. In that case we are certain that X+C is no smaller
8050 than X and so the transformed expression will overflow iff the
8051 original would have. */
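/* Concretely (made-up 32-bit values): X = 0x40000000 and C = -0x3ffffff0
   give X+C = 16, so (X+C)*4 is fine, while the distributed form
   X*4 + C*4 already overflows in the X*4 term. */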
8053 if (GET_CODE (op0) != REG)
8054 op0 = force_operand (op0, NULL_RTX);
8055 if (GET_CODE (op0) != REG)
8056 op0 = copy_to_mode_reg (mode, op0);
8059 gen_rtx_MULT (mode, op0,
8060 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8063 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8066 /* Check for multiplying things that have been extended
8067 from a narrower type. If this machine supports multiplying
8068 in that narrower type with a result in the desired type,
8069 do it that way, and avoid the explicit type-conversion. */
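/* For example (target permitting): with 16-bit shorts A and B, the
   expression "(int) A * (int) B" can use one 16x16->32 widening
   multiply via smul_widen_optab instead of extending both operands
   and doing a full 32-bit multiply. */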
8070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8071 && TREE_CODE (type) == INTEGER_TYPE
8072 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8073 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8074 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8075 && int_fits_type_p (TREE_OPERAND (exp, 1),
8076 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8077 /* Don't use a widening multiply if a shift will do. */
8078 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8079 > HOST_BITS_PER_WIDE_INT)
8080 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8082 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8083 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8085 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8086 /* If both operands are extended, they must either both
8087 be zero-extended or both be sign-extended. */
8088 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8090 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8092 enum machine_mode innermode
8093 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8094 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8095 ? smul_widen_optab : umul_widen_optab);
8096 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8097 ? umul_widen_optab : smul_widen_optab);
8098 if (mode == GET_MODE_WIDER_MODE (innermode))
8100 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8102 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8103 NULL_RTX, VOIDmode, 0);
8104 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8105 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8108 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8109 NULL_RTX, VOIDmode, 0);
8112 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8113 && innermode == word_mode)
8116 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8117 NULL_RTX, VOIDmode, 0);
8118 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8119 op1 = convert_modes (innermode, mode,
8120 expand_expr (TREE_OPERAND (exp, 1),
8121 NULL_RTX, VOIDmode, 0),
8124 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8125 NULL_RTX, VOIDmode, 0);
8126 temp = expand_binop (mode, other_optab, op0, op1, target,
8127 unsignedp, OPTAB_LIB_WIDEN);
8128 htem = expand_mult_highpart_adjust (innermode,
8129 gen_highpart (innermode, temp),
8131 gen_highpart (innermode, temp),
8133 emit_move_insn (gen_highpart (innermode, temp), htem);
8138 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8139 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8140 return expand_mult (mode, op0, op1, target, unsignedp);
8142 case TRUNC_DIV_EXPR:
8143 case FLOOR_DIV_EXPR:
8145 case ROUND_DIV_EXPR:
8146 case EXACT_DIV_EXPR:
8147 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8149 /* Possible optimization: compute the dividend with EXPAND_SUM
8150 then, if the divisor is constant, we can optimize the case
8151 where some terms of the dividend have coefficients divisible by it. */
8152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8154 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8157 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8158 saving an expensive divide. If not, combine will rebuild the original computation. */
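/* E.g. a/d + b/d becomes (1/d)*a + (1/d)*b, so CSE can compute 1/d
   once; this is done only under -funsafe-math-optimizations since
   it can change rounding. */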
8160 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8161 && TREE_CODE (type) == REAL_TYPE
8162 && !real_onep (TREE_OPERAND (exp, 0)))
8163 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8164 build (RDIV_EXPR, type,
8165 build_real (type, dconst1),
8166 TREE_OPERAND (exp, 1))),
8167 target, tmode, unsignedp);
8168 this_optab = sdiv_optab;
8171 case TRUNC_MOD_EXPR:
8172 case FLOOR_MOD_EXPR:
8174 case ROUND_MOD_EXPR:
8175 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8179 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8181 case FIX_ROUND_EXPR:
8182 case FIX_FLOOR_EXPR:
8184 abort (); /* Not used for C. */
8186 case FIX_TRUNC_EXPR:
8187 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8189 target = gen_reg_rtx (mode);
8190 expand_fix (target, op0, unsignedp);
8194 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8196 target = gen_reg_rtx (mode);
8197 /* expand_float can't figure out what to do if FROM has VOIDmode.
8198 So give it the correct mode. With -O, cse will optimize this. */
8199 if (GET_MODE (op0) == VOIDmode)
8200 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8202 expand_float (target, op0,
8203 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
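/* Example of the VOIDmode fixup above (illustrative): expanding
   (double) 5 gives op0 = (const_int 5), which carries no machine mode,
   so it is first copied into a register of the operand's TYPE_MODE
   (say SImode) to give expand_float a properly moded source.  */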
8207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8208 temp = expand_unop (mode,
8209 ! unsignedp && flag_trapv
8210 && (GET_MODE_CLASS(mode) == MODE_INT)
8211 ? negv_optab : neg_optab, op0, target, 0);
8217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8219 /* Handle complex values specially. */
8220 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8221 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8222 return expand_complex_abs (mode, op0, target, unsignedp);
8224 /* Unsigned abs is simply the operand. Testing here means we don't
8225 risk generating incorrect code below. */
8226 if (TREE_UNSIGNED (type))
8229 return expand_abs (mode, op0, target, unsignedp,
8230 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8234 target = original_target;
8235 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8236 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8237 || GET_MODE (target) != mode
8238 || (GET_CODE (target) == REG
8239 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8240 target = gen_reg_rtx (mode);
8241 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8244 /* First try to do it with a special MIN or MAX instruction.
8245 If that does not win, use a conditional jump to select the proper value. */
8247 this_optab = (TREE_UNSIGNED (type)
8248 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8249 : (code == MIN_EXPR ? smin_optab : smax_optab));
8251 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8256 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8259 if (GET_CODE (target) == MEM)
8260 target = gen_reg_rtx (mode);
8263 emit_move_insn (target, op0);
8265 op0 = gen_label_rtx ();
8267 /* If this mode is an integer too wide to compare properly,
8268 compare word by word. Rely on cse to optimize constant cases. */
8269 if (GET_MODE_CLASS (mode) == MODE_INT
8270 && ! can_compare_p (GE, mode, ccp_jump))
8272 if (code == MAX_EXPR)
8273 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8274 target, op1, NULL_RTX, op0);
8276 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8277 op1, target, NULL_RTX, op0);
8281 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8282 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8283 unsignedp, mode, NULL_RTX, NULL_RTX,
8286 emit_move_insn (target, op1);
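/* The fallback above amounts to this sketch (GE for MAX_EXPR,
   LE for MIN_EXPR):
       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:
   used when no min/max instruction pattern matched.  */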
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8292 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8299 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8304 /* ??? Can optimize bitwise operations with one arg constant.
8305 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8306 and (a bitwise1 b) bitwise2 b (etc)
8307 but that is probably not worthwhile. */
8309 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8310 boolean values when we want in all cases to compute both of them. In
8311 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8312 as actual zero-or-1 values and then bitwise anding. In cases where
8313 there cannot be any side effects, better code would be made by
8314 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8315 how to recognize those cases. */
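/* Illustrative contrast (hedged): for "f () && g ()" the front end uses
   TRUTH_ANDIF_EXPR, which must skip g () when f () is false;
   TRUTH_AND_EXPR instead evaluates both operands to 0-or-1 values and
   ANDs the bits, which is only acceptable when neither operand has
   side effects.  */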
8317 case TRUTH_AND_EXPR:
8319 this_optab = and_optab;
8324 this_optab = ior_optab;
8327 case TRUTH_XOR_EXPR:
8329 this_optab = xor_optab;
8336 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8339 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8342 /* Could determine the answer when only additive constants differ. Also,
8343 the addition of one can be handled by changing the condition. */
8350 case UNORDERED_EXPR:
8357 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8361 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8362 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8364 && GET_CODE (original_target) == REG
8365 && (GET_MODE (original_target)
8366 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8368 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8371 /* If temp is constant, we can just compute the result. */
8372 if (GET_CODE (temp) == CONST_INT)
8374 if (INTVAL (temp) != 0)
8375 emit_move_insn (target, const1_rtx);
8377 emit_move_insn (target, const0_rtx);
8382 if (temp != original_target)
8384 enum machine_mode mode1 = GET_MODE (temp);
8385 if (mode1 == VOIDmode)
8386 mode1 = tmode != VOIDmode ? tmode : mode;
8388 temp = copy_to_mode_reg (mode1, temp);
8391 op1 = gen_label_rtx ();
8392 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8393 GET_MODE (temp), unsignedp, op1);
8394 emit_move_insn (temp, const1_rtx);
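/* Sketch of the branchy fallback above for "foo != 0":
       temp = foo;
       if (temp == 0) goto done;
       temp = 1;
     done:
   leaving TEMP equal to 0 or 1 without a store-flag instruction.  */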
8399 /* If no set-flag instruction, must generate a conditional
8400 store into a temporary variable. Drop through
8401 and handle this like && and ||. */
8403 case TRUTH_ANDIF_EXPR:
8404 case TRUTH_ORIF_EXPR:
8406 && (target == 0 || ! safe_from_p (target, exp, 1)
8407 /* Make sure we don't have a hard reg (such as function's return
8408 value) live across basic blocks, if not optimizing. */
8409 || (!optimize && GET_CODE (target) == REG
8410 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8411 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8414 emit_clr_insn (target);
8416 op1 = gen_label_rtx ();
8417 jumpifnot (exp, op1);
8420 emit_0_to_1_insn (target);
8423 return ignore ? const0_rtx : target;
8425 case TRUTH_NOT_EXPR:
8426 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8427 /* The parser is careful to generate TRUTH_NOT_EXPR
8428 only with operands that are always zero or one. */
8429 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8430 target, 1, OPTAB_LIB_WIDEN);
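/* The XOR trick relies on the operand being exactly 0 or 1:
   !x == (x ^ 1) holds for x in {0, 1} but not for arbitrary nonzero x,
   hence the parser guarantee cited above.  */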
8436 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8438 return expand_expr (TREE_OPERAND (exp, 1),
8439 (ignore ? const0_rtx : target),
8443 /* If we would have a "singleton" (see below) were it not for a
8444 conversion in each arm, bring that conversion back out. */
8445 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8446 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8447 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8448 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8450 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8451 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8453 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8454 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8455 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8456 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8457 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8458 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8459 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8460 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8461 return expand_expr (build1 (NOP_EXPR, type,
8462 build (COND_EXPR, TREE_TYPE (iftrue),
8463 TREE_OPERAND (exp, 0),
8465 target, tmode, modifier);
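/* Example of the rewrite above (hedged): a tree such as
       x ? (int) (a + b) : (int) a
   is turned into (int) (x ? a + b : a), re-exposing the "singleton"
   A-versus-A-plus-B shape that the code below optimizes.  */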
8469 /* Note that COND_EXPRs whose type is a structure or union
8470 are required to be constructed to contain assignments of
8471 a temporary variable, so that we can evaluate them here
8472 for side effect only. If type is void, we must do likewise. */
8474 /* If an arm of the branch requires a cleanup,
8475 only that cleanup is performed. */
8478 tree binary_op = 0, unary_op = 0;
8480 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8481 convert it to our mode, if necessary. */
8482 if (integer_onep (TREE_OPERAND (exp, 1))
8483 && integer_zerop (TREE_OPERAND (exp, 2))
8484 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8488 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8493 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8494 if (GET_MODE (op0) == mode)
8498 target = gen_reg_rtx (mode);
8499 convert_move (target, op0, unsignedp);
8503 /* Check for X ? A + B : A. If we have this, we can copy A to the
8504 output and conditionally add B. Similarly for unary operations.
8505 Don't do this if X has side-effects because those side effects
8506 might affect A or B and the "?" operation is a sequence point in
8507 ANSI. (operand_equal_p tests for side effects.) */
8509 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8510 && operand_equal_p (TREE_OPERAND (exp, 2),
8511 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8512 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8513 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8514 && operand_equal_p (TREE_OPERAND (exp, 1),
8515 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8516 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8517 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8518 && operand_equal_p (TREE_OPERAND (exp, 2),
8519 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8520 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8521 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8522 && operand_equal_p (TREE_OPERAND (exp, 1),
8523 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8524 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8526 /* If we are not to produce a result, we have no target. Otherwise,
8527 if a target was specified use it; it will not be used as an
8528 intermediate target unless it is safe. If no target, use a temporary. */
8533 else if (original_target
8534 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8535 || (singleton && GET_CODE (original_target) == REG
8536 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8537 && original_target == var_rtx (singleton)))
8538 && GET_MODE (original_target) == mode
8539 #ifdef HAVE_conditional_move
8540 && (! can_conditionally_move_p (mode)
8541 || GET_CODE (original_target) == REG
8542 || TREE_ADDRESSABLE (type))
8544 && (GET_CODE (original_target) != MEM
8545 || TREE_ADDRESSABLE (type)))
8546 temp = original_target;
8547 else if (TREE_ADDRESSABLE (type))
8550 temp = assign_temp (type, 0, 0, 1);
8552 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8553 do the test of X as a store-flag operation, do this as
8554 A + ((X != 0) << log C). Similarly for other simple binary
8555 operators. Only do for C == 1 if BRANCH_COST is low. */
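/* Concrete instance (illustrative), with C == 8:
       x ? a + 8 : a   ==>   a + ((x != 0) << 3)
   trading a branch for a store-flag and a shift when BRANCH_COST
   makes that profitable.  */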
8556 if (temp && singleton && binary_op
8557 && (TREE_CODE (binary_op) == PLUS_EXPR
8558 || TREE_CODE (binary_op) == MINUS_EXPR
8559 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8560 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8561 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8562 : integer_onep (TREE_OPERAND (binary_op, 1)))
8563 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8566 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8567 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8568 ? addv_optab : add_optab)
8569 : TREE_CODE (binary_op) == MINUS_EXPR
8570 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8571 ? subv_optab : sub_optab)
8572 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8575 /* If we had X ? A : A + 1, do this as A + (X == 0).
8577 We have to invert the truth value here and then put it
8578 back later if do_store_flag fails. We cannot simply copy
8579 TREE_OPERAND (exp, 0) to another variable and modify that
8580 because invert_truthvalue can modify the tree pointed to by its argument. */
8582 if (singleton == TREE_OPERAND (exp, 1))
8583 TREE_OPERAND (exp, 0)
8584 = invert_truthvalue (TREE_OPERAND (exp, 0));
8586 result = do_store_flag (TREE_OPERAND (exp, 0),
8587 (safe_from_p (temp, singleton, 1)
8589 mode, BRANCH_COST <= 1);
8591 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8592 result = expand_shift (LSHIFT_EXPR, mode, result,
8593 build_int_2 (tree_log2
8597 (safe_from_p (temp, singleton, 1)
8598 ? temp : NULL_RTX), 0);
8602 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8603 return expand_binop (mode, boptab, op1, result, temp,
8604 unsignedp, OPTAB_LIB_WIDEN);
8606 else if (singleton == TREE_OPERAND (exp, 1))
8607 TREE_OPERAND (exp, 0)
8608 = invert_truthvalue (TREE_OPERAND (exp, 0));
8611 do_pending_stack_adjust ();
8613 op0 = gen_label_rtx ();
8615 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8619 /* If the target conflicts with the other operand of the
8620 binary op, we can't use it. Also, we can't use the target
8621 if it is a hard register, because evaluating the condition
8622 might clobber it. */
8624 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8625 || (GET_CODE (temp) == REG
8626 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8627 temp = gen_reg_rtx (mode);
8628 store_expr (singleton, temp, 0);
8631 expand_expr (singleton,
8632 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8633 if (singleton == TREE_OPERAND (exp, 1))
8634 jumpif (TREE_OPERAND (exp, 0), op0);
8636 jumpifnot (TREE_OPERAND (exp, 0), op0);
8638 start_cleanup_deferral ();
8639 if (binary_op && temp == 0)
8640 /* Just touch the other operand. */
8641 expand_expr (TREE_OPERAND (binary_op, 1),
8642 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8644 store_expr (build (TREE_CODE (binary_op), type,
8645 make_tree (type, temp),
8646 TREE_OPERAND (binary_op, 1)),
8649 store_expr (build1 (TREE_CODE (unary_op), type,
8650 make_tree (type, temp)),
8654 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8655 comparison operator. If we have one of these cases, set the
8656 output to A, branch on A (cse will merge these two references),
8657 then set the output to FOO. */
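/* E.g. (illustrative) for "p != 0 ? p : q" this emits roughly
       temp = p;
       if (p != 0) goto done;
       temp = q;
     done:
   and cse later merges the two references to P.  */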
8659 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8660 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8661 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8662 TREE_OPERAND (exp, 1), 0)
8663 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8664 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8665 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8667 if (GET_CODE (temp) == REG
8668 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8669 temp = gen_reg_rtx (mode);
8670 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8671 jumpif (TREE_OPERAND (exp, 0), op0);
8673 start_cleanup_deferral ();
8674 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8678 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8679 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8680 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8681 TREE_OPERAND (exp, 2), 0)
8682 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8683 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8684 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8686 if (GET_CODE (temp) == REG
8687 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8688 temp = gen_reg_rtx (mode);
8689 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8690 jumpifnot (TREE_OPERAND (exp, 0), op0);
8692 start_cleanup_deferral ();
8693 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8698 op1 = gen_label_rtx ();
8699 jumpifnot (TREE_OPERAND (exp, 0), op0);
8701 start_cleanup_deferral ();
8703 /* One branch of the cond can be void, if it never returns. For
8704 example, A ? throw : E. */
8706 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8707 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8709 expand_expr (TREE_OPERAND (exp, 1),
8710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8711 end_cleanup_deferral ();
8713 emit_jump_insn (gen_jump (op1));
8716 start_cleanup_deferral ();
8718 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8719 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8721 expand_expr (TREE_OPERAND (exp, 2),
8722 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8725 end_cleanup_deferral ();
8736 /* Something needs to be initialized, but we didn't know
8737 where that thing was when building the tree. For example,
8738 it could be the return value of a function, or a parameter
8739 to a function that is laid down in the stack, or a temporary
8740 variable which must be passed by reference.
8742 We guarantee that the expression will either be constructed
8743 or copied into our original target. */
8745 tree slot = TREE_OPERAND (exp, 0);
8746 tree cleanups = NULL_TREE;
8749 if (TREE_CODE (slot) != VAR_DECL)
8753 target = original_target;
8755 /* Set this here so that if we get a target that refers to a
8756 register variable that's already been used, put_reg_into_stack
8757 knows that it should fix up those uses. */
8758 TREE_USED (slot) = 1;
8762 if (DECL_RTL_SET_P (slot))
8764 target = DECL_RTL (slot);
8765 /* We have already expanded the slot, so don't do anything else now. */
8767 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8772 target = assign_temp (type, 2, 0, 1);
8773 /* All temp slots at this level must not conflict. */
8774 preserve_temp_slots (target);
8775 SET_DECL_RTL (slot, target);
8776 if (TREE_ADDRESSABLE (slot))
8777 put_var_into_stack (slot);
8779 /* Since SLOT is not known to the called function
8780 to belong to its stack frame, we must build an explicit
8781 cleanup. This case occurs when we must build up a reference
8782 to pass the reference as an argument. In this case,
8783 it is very likely that such a reference need not be built here. */
8786 if (TREE_OPERAND (exp, 2) == 0)
8787 TREE_OPERAND (exp, 2)
8788 = (*lang_hooks.maybe_build_cleanup) (slot);
8789 cleanups = TREE_OPERAND (exp, 2);
8794 /* This case does occur, when expanding a parameter which
8795 needs to be constructed on the stack. The target
8796 is the actual stack address that we want to initialize.
8797 The function we call will perform the cleanup in this case. */
8799 /* If we have already assigned it space, use that space,
8800 not the target that we were passed in, as our target
8801 parameter is only a hint. */
8802 if (DECL_RTL_SET_P (slot))
8804 target = DECL_RTL (slot);
8805 /* We have already expanded the slot, so don't do anything else now. */
8807 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8812 SET_DECL_RTL (slot, target);
8813 /* If we must have an addressable slot, then make sure that
8814 the RTL that we just stored in slot is OK. */
8815 if (TREE_ADDRESSABLE (slot))
8816 put_var_into_stack (slot);
8820 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8821 /* Mark it as expanded. */
8822 TREE_OPERAND (exp, 1) = NULL_TREE;
8824 store_expr (exp1, target, 0);
8826 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8833 tree lhs = TREE_OPERAND (exp, 0);
8834 tree rhs = TREE_OPERAND (exp, 1);
8836 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8842 /* If lhs is complex, expand calls in rhs before computing it.
8843 That's so we don't compute a pointer and save it over a
8844 call. If lhs is simple, compute it first so we can give it
8845 as a target if the rhs is just a call. This avoids an
8846 extra temp and copy and that prevents a partial-subsumption
8847 which makes bad code. Actually we could treat
8848 component_ref's of vars like vars. */
8850 tree lhs = TREE_OPERAND (exp, 0);
8851 tree rhs = TREE_OPERAND (exp, 1);
8855 /* Check for |= or &= of a bitfield of size one into another bitfield
8856 of size one. In this case, (unless we need the result of the
8857 assignment) we can do this more efficiently with a
8858 test followed by an assignment, if necessary.
8860 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8861 things change so we do, this code should be enhanced to support it. */
8864 && TREE_CODE (lhs) == COMPONENT_REF
8865 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8866 || TREE_CODE (rhs) == BIT_AND_EXPR)
8867 && TREE_OPERAND (rhs, 0) == lhs
8868 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8869 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8870 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8872 rtx label = gen_label_rtx ();
8874 do_jump (TREE_OPERAND (rhs, 1),
8875 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8876 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8877 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8878 (TREE_CODE (rhs) == BIT_IOR_EXPR
8880 : integer_zero_node)),
8882 do_pending_stack_adjust ();
8887 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8893 if (!TREE_OPERAND (exp, 0))
8894 expand_null_return ();
8896 expand_return (TREE_OPERAND (exp, 0));
8899 case PREINCREMENT_EXPR:
8900 case PREDECREMENT_EXPR:
8901 return expand_increment (exp, 0, ignore);
8903 case POSTINCREMENT_EXPR:
8904 case POSTDECREMENT_EXPR:
8905 /* Faster to treat as pre-increment if result is not used. */
8906 return expand_increment (exp, ! ignore, ignore);
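/* E.g. a statement "i++;" whose value is discarded is expanded exactly
   like "++i;", avoiding a copy of the old value.  */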
8909 /* Are we taking the address of a nested function? */
8910 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8911 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8912 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8913 && ! TREE_STATIC (exp))
8915 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8916 op0 = force_operand (op0, target);
8918 /* If we are taking the address of something erroneous, just use zero. */
8920 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8922 /* If we are taking the address of a constant and are at the
8923 top level, we have to use output_constant_def since we can't
8924 call force_const_mem at top level. */
8926 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8927 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8929 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8932 /* We make sure to pass const0_rtx down if we came in with
8933 ignore set, to avoid doing the cleanups twice for something. */
8934 op0 = expand_expr (TREE_OPERAND (exp, 0),
8935 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8936 (modifier == EXPAND_INITIALIZER
8937 ? modifier : EXPAND_CONST_ADDRESS));
8939 /* If we are going to ignore the result, OP0 will have been set
8940 to const0_rtx, so just return it. Don't get confused and
8941 think we are taking the address of the constant. */
8945 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8946 clever and return a REG when given a MEM. */
8947 op0 = protect_from_queue (op0, 1);
8949 /* We would like the object in memory. If it is a constant, we can
8950 have it be statically allocated into memory. For a non-constant,
8951 we need to allocate some memory and store the value into it. */
8953 if (CONSTANT_P (op0))
8954 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8956 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8957 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8958 || GET_CODE (op0) == PARALLEL)
8960 /* If the operand is a SAVE_EXPR, we can deal with this by
8961 forcing the SAVE_EXPR into memory. */
8962 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8964 put_var_into_stack (TREE_OPERAND (exp, 0));
8965 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8969 /* If this object is in a register, it can't be BLKmode. */
8970 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8971 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8973 if (GET_CODE (op0) == PARALLEL)
8974 /* Handle calls that pass values in multiple
8975 non-contiguous locations. The Irix 6 ABI has examples of this. */
8977 emit_group_store (memloc, op0,
8978 int_size_in_bytes (inner_type));
8980 emit_move_insn (memloc, op0);
8986 if (GET_CODE (op0) != MEM)
8989 mark_temp_addr_taken (op0);
8990 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8992 op0 = XEXP (op0, 0);
8993 #ifdef POINTERS_EXTEND_UNSIGNED
8994 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8995 && mode == ptr_mode)
8996 op0 = convert_memory_address (ptr_mode, op0);
9001 /* If OP0 is not aligned at least as much as the type requires, we
9002 need to make a temporary, copy OP0 to it, and take the address of
9003 the temporary. We want to use the alignment of the type, not of
9004 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9005 the test for BLKmode means that can't happen. The test for
9006 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
9009 We don't need to do this at all if the machine doesn't have
9010 strict alignment. */
9011 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9012 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9014 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9016 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9018 = assign_stack_temp_for_type
9019 (TYPE_MODE (inner_type),
9020 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9021 : int_size_in_bytes (inner_type),
9022 1, build_qualified_type (inner_type,
9023 (TYPE_QUALS (inner_type)
9024 | TYPE_QUAL_CONST)));
9026 if (TYPE_ALIGN_OK (inner_type))
9029 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9034 op0 = force_operand (XEXP (op0, 0), target);
9038 && GET_CODE (op0) != REG
9039 && modifier != EXPAND_CONST_ADDRESS
9040 && modifier != EXPAND_INITIALIZER
9041 && modifier != EXPAND_SUM)
9042 op0 = force_reg (Pmode, op0);
9044 if (GET_CODE (op0) == REG
9045 && ! REG_USERVAR_P (op0))
9046 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9048 #ifdef POINTERS_EXTEND_UNSIGNED
9049 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9050 && mode == ptr_mode)
9051 op0 = convert_memory_address (ptr_mode, op0);
9056 case ENTRY_VALUE_EXPR:
9059 /* COMPLEX type for Extended Pascal & Fortran */
9062 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9065 /* Get the rtx code of the operands. */
9066 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9067 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9070 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9074 /* Move the real (op0) and imaginary (op1) parts to their location. */
9075 emit_move_insn (gen_realpart (mode, target), op0);
9076 emit_move_insn (gen_imagpart (mode, target), op1);
9078 insns = get_insns ();
9081 /* Complex construction should appear as a single unit. */
9082 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9083 each with a separate pseudo as destination.
9084 It's not correct for flow to treat them as a unit. */
9085 if (GET_CODE (target) != CONCAT)
9086 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9094 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9095 return gen_realpart (mode, op0);
9098 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9099 return gen_imagpart (mode, op0);
9103 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9107 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9110 target = gen_reg_rtx (mode);
9114 /* Store the realpart and the negated imagpart to target. */
9115 emit_move_insn (gen_realpart (partmode, target),
9116 gen_realpart (partmode, op0));
9118 imag_t = gen_imagpart (partmode, target);
9119 temp = expand_unop (partmode,
9120 ! unsignedp && flag_trapv
9121 && (GET_MODE_CLASS(partmode) == MODE_INT)
9122 ? negv_optab : neg_optab,
9123 gen_imagpart (partmode, op0), imag_t, 0);
9125 emit_move_insn (imag_t, temp);
9127 insns = get_insns ();
9130 /* Conjugate should appear as a single unit.
9131 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9132 each with a separate pseudo as destination.
9133 It's not correct for flow to treat them as a unit. */
9134 if (GET_CODE (target) != CONCAT)
9135 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9142 case TRY_CATCH_EXPR:
9144 tree handler = TREE_OPERAND (exp, 1);
9146 expand_eh_region_start ();
9148 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9150 expand_eh_region_end_cleanup (handler);
9155 case TRY_FINALLY_EXPR:
9157 tree try_block = TREE_OPERAND (exp, 0);
9158 tree finally_block = TREE_OPERAND (exp, 1);
9160 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9162 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9163 is not sufficient, so we cannot expand the block twice.
9164 So we play games with GOTO_SUBROUTINE_EXPR to let us
9165 expand the thing only once. */
9166 /* When not optimizing, we go ahead with this form since
9167 (1) user breakpoints operate more predictably without
9168 code duplication, and
9169 (2) we're not running any of the global optimizers
9170 that would explode in time/space with the highly
9171 connected CFG created by the indirect branching. */
9173 rtx finally_label = gen_label_rtx ();
9174 rtx done_label = gen_label_rtx ();
9175 rtx return_link = gen_reg_rtx (Pmode);
9176 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9177 (tree) finally_label, (tree) return_link);
9178 TREE_SIDE_EFFECTS (cleanup) = 1;
9180 /* Start a new binding layer that will keep track of all cleanup
9181 actions to be performed. */
9182 expand_start_bindings (2);
9183 target_temp_slot_level = temp_slot_level;
9185 expand_decl_cleanup (NULL_TREE, cleanup);
9186 op0 = expand_expr (try_block, target, tmode, modifier);
9188 preserve_temp_slots (op0);
9189 expand_end_bindings (NULL_TREE, 0, 0);
9190 emit_jump (done_label);
9191 emit_label (finally_label);
9192 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9193 emit_indirect_jump (return_link);
9194 emit_label (done_label);
9198 expand_start_bindings (2);
9199 target_temp_slot_level = temp_slot_level;
9201 expand_decl_cleanup (NULL_TREE, finally_block);
9202 op0 = expand_expr (try_block, target, tmode, modifier);
9204 preserve_temp_slots (op0);
9205 expand_end_bindings (NULL_TREE, 0, 0);
9211 case GOTO_SUBROUTINE_EXPR:
9213 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9214 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9215 rtx return_address = gen_label_rtx ();
9216 emit_move_insn (return_link,
9217 gen_rtx_LABEL_REF (Pmode, return_address));
9219 emit_label (return_address);
9224 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9227 return get_exception_pointer (cfun);
9230 /* Function descriptors are not valid except as
9231 initialization constants, and should not be expanded. */
9235 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9238 /* Here to do an ordinary binary operator, generating an instruction
9239 from the optab already placed in `this_optab'. */
9241 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9243 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9244 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9246 temp = expand_binop (mode, this_optab, op0, op1, target,
9247 unsignedp, OPTAB_LIB_WIDEN);
9253 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9254 when applied to the address of EXP produces an address known to be
9255 aligned more than BIGGEST_ALIGNMENT. */
9258 is_aligning_offset (offset, exp)
9262 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9263 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9264 || TREE_CODE (offset) == NOP_EXPR
9265 || TREE_CODE (offset) == CONVERT_EXPR
9266 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9267 offset = TREE_OPERAND (offset, 0);
9269 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9270 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9271 if (TREE_CODE (offset) != BIT_AND_EXPR
9272 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9273 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9274 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9277 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9278 It must be NEGATE_EXPR. Then strip any more conversions. */
9279 offset = TREE_OPERAND (offset, 0);
9280 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9281 || TREE_CODE (offset) == NOP_EXPR
9282 || TREE_CODE (offset) == CONVERT_EXPR)
9283 offset = TREE_OPERAND (offset, 0);
9285 if (TREE_CODE (offset) != NEGATE_EXPR)
9288 offset = TREE_OPERAND (offset, 0);
9289 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9290 || TREE_CODE (offset) == NOP_EXPR
9291 || TREE_CODE (offset) == CONVERT_EXPR)
9292 offset = TREE_OPERAND (offset, 0);
9294 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9295 whose type is the same as EXP. */
9296 return (TREE_CODE (offset) == ADDR_EXPR
9297 && (TREE_OPERAND (offset, 0) == exp
9298 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9299 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9300 == TREE_TYPE (exp)))));
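/* Illustrative shape recognized above (hedged): for an alignment A
   that is a power of 2 larger than BIGGEST_ALIGNMENT, an offset of
       (- (intptr) &exp) & (A - 1)
   rounds the address of EXP up to the next multiple of A, so
   EXP + OFFSET is known to be A-aligned.  */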
9303 /* Return the tree node if ARG corresponds to a string constant or zero
9304 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9305 in bytes within the string that ARG is accessing. The type of the
9306 offset will be `sizetype'. */
9309 string_constant (arg, ptr_offset)
9315 if (TREE_CODE (arg) == ADDR_EXPR
9316 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9318 *ptr_offset = size_zero_node;
9319 return TREE_OPERAND (arg, 0);
9321 else if (TREE_CODE (arg) == PLUS_EXPR)
9323 tree arg0 = TREE_OPERAND (arg, 0);
9324 tree arg1 = TREE_OPERAND (arg, 1);
9329 if (TREE_CODE (arg0) == ADDR_EXPR
9330 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9332 *ptr_offset = convert (sizetype, arg1);
9333 return TREE_OPERAND (arg0, 0);
9335 else if (TREE_CODE (arg1) == ADDR_EXPR
9336 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9338 *ptr_offset = convert (sizetype, arg0);
9339 return TREE_OPERAND (arg1, 0);
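/* Usage sketch (hedged): for an argument tree equivalent to
   "hello" + 2, the PLUS_EXPR case returns the STRING_CST "hello" and
   sets *PTR_OFFSET to (sizetype) 2, letting callers fold accesses into
   the constant string at compile time.  */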
9346 /* Expand code for a post- or pre- increment or decrement
9347 and return the RTX for the result.
9348 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9351 expand_increment (exp, post, ignore)
9357 tree incremented = TREE_OPERAND (exp, 0);
9358 optab this_optab = add_optab;
9360 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9361 int op0_is_copy = 0;
9362 int single_insn = 0;
9363 /* 1 means we can't store into OP0 directly,
9364 because it is a subreg narrower than a word,
9365 and we don't dare clobber the rest of the word. */
9368 /* Stabilize any component ref that might need to be
9369 evaluated more than once below. */
9371 || TREE_CODE (incremented) == BIT_FIELD_REF
9372 || (TREE_CODE (incremented) == COMPONENT_REF
9373 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9374 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9375 incremented = stabilize_reference (incremented);
9376 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9377 ones into save exprs so that they don't accidentally get evaluated
9378 more than once by the code below. */
9379 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9380 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9381 incremented = save_expr (incremented);
9383 /* Compute the operands as RTX.
9384 Note whether OP0 is the actual lvalue or a copy of it:
9385 I believe it is a copy iff it is a register or subreg
9386 and insns were generated in computing it. */
9388 temp = get_last_insn ();
9389 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9391 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9392 in place but instead must do sign- or zero-extension during assignment,
9393 so we copy it into a new register and let the code below use it as a copy.
9396 Note that we can safely modify this SUBREG since it is known not to be
9397 shared (it was made by the expand_expr call above). */
9399 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9402 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9406 else if (GET_CODE (op0) == SUBREG
9407 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9409 /* We cannot increment this SUBREG in place. If we are
9410 post-incrementing, get a copy of the old value. Otherwise,
9411 just mark that we cannot increment in place. */
9413 op0 = copy_to_reg (op0);
9418 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9419 && temp != get_last_insn ());
9420 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9422 /* Decide whether incrementing or decrementing. */
9423 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9424 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9425 this_optab = sub_optab;
9427 /* Convert decrement by a constant into a negative increment. */
9428 if (this_optab == sub_optab
9429 && GET_CODE (op1) == CONST_INT)
9431 op1 = GEN_INT (-INTVAL (op1));
9432 this_optab = add_optab;
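/* E.g. (illustrative) "x -= 4" arrives here as sub_optab with
   op1 == (const_int 4) and leaves as add_optab with
   op1 == (const_int -4), so only the add patterns need be handled
   below.  */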
9435 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9436 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9438 /* For a preincrement, see if we can do this with a single instruction. */
9441 icode = (int) this_optab->handlers[(int) mode].insn_code;
9442 if (icode != (int) CODE_FOR_nothing
9443 /* Make sure that OP0 is valid for operands 0 and 1
9444 of the insn we want to queue. */
9445 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9446 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9447 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9451 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9452 then we cannot just increment OP0. We must therefore contrive to
9453 increment the original value. Then, for postincrement, we can return
9454 OP0 since it is a copy of the old value. For preincrement, expand here
9455 unless we can do it with a single insn.
9457 Likewise if storing directly into OP0 would clobber high bits
9458 we need to preserve (bad_subreg). */
9459 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9461 /* This is the easiest way to increment the value wherever it is.
9462 Problems with multiple evaluation of INCREMENTED are prevented
9463 because either (1) it is a component_ref or preincrement,
9464 in which case it was stabilized above, or (2) it is an array_ref
9465 with constant index in an array in a register, which is
9466 safe to reevaluate. */
9467 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9468 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9469 ? MINUS_EXPR : PLUS_EXPR),
9472 TREE_OPERAND (exp, 1));
9474 while (TREE_CODE (incremented) == NOP_EXPR
9475 || TREE_CODE (incremented) == CONVERT_EXPR)
9477 newexp = convert (TREE_TYPE (incremented), newexp);
9478 incremented = TREE_OPERAND (incremented, 0);
9481 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9482 return post ? op0 : temp;
9487 /* We have a true reference to the value in OP0.
9488 If there is an insn to add or subtract in this mode, queue it.
9489 Queueing the increment insn avoids the register shuffling
9490 that often results if we must increment now and first save
9491 the old value for subsequent use. */
9493 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9494 op0 = stabilize (op0);
9497 icode = (int) this_optab->handlers[(int) mode].insn_code;
9498 if (icode != (int) CODE_FOR_nothing
9499 /* Make sure that OP0 is valid for operands 0 and 1
9500 of the insn we want to queue. */
9501 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9502 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9504 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9505 op1 = force_reg (mode, op1);
9507 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9509 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9511 rtx addr = (general_operand (XEXP (op0, 0), mode)
9512 ? force_reg (Pmode, XEXP (op0, 0))
9513 : copy_to_reg (XEXP (op0, 0)));
9516 op0 = replace_equiv_address (op0, addr);
9517 temp = force_reg (GET_MODE (op0), op0);
9518 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9519 op1 = force_reg (mode, op1);
9521 /* The increment queue is LIFO, thus we have to `queue'
9522 the instructions in reverse order. */
9523 enqueue_insn (op0, gen_move_insn (op0, temp));
9524 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
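/* Because the queue is LIFO, the store back to OP0 is enqueued first
   and the add second, so when the queue unwinds the add runs before
   the store; effectively:
       temp = temp + op1;
       *addr = temp;  */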
9529 /* Preincrement, or we can't increment with one simple insn. */
9531 /* Save a copy of the value before inc or dec, to return it later. */
9532 temp = value = copy_to_reg (op0);
9534 /* Arrange to return the incremented value. */
9535 /* Copy the rtx because expand_binop will protect from the queue,
9536 and the results of that would be invalid for us to return
9537 if our caller does emit_queue before using our result. */
9538 temp = copy_rtx (value = op0);
9540 /* Increment however we can. */
9541 op1 = expand_binop (mode, this_optab, value, op1, op0,
9542 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9544 /* Make sure the value is stored into OP0. */
9546 emit_move_insn (op0, op1);
9551 /* At the start of a function, record that we have no previously-pushed
9552 arguments waiting to be popped. */
9555 init_pending_stack_adjust ()
9557 pending_stack_adjust = 0;
9560 /* When exiting from function, if safe, clear out any pending stack adjust
9561 so the adjustment won't get done.
9563 Note, if the current function calls alloca, then it must have a
9564 frame pointer regardless of the value of flag_omit_frame_pointer. */
9567 clear_pending_stack_adjust ()
9569 #ifdef EXIT_IGNORE_STACK
9571 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9572 && EXIT_IGNORE_STACK
9573 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9574 && ! flag_inline_functions)
9576 stack_pointer_delta -= pending_stack_adjust,
9577 pending_stack_adjust = 0;
9582 /* Pop any previously-pushed arguments that have not been popped yet. */
9585 do_pending_stack_adjust ()
9587 if (inhibit_defer_pop == 0)
9589 if (pending_stack_adjust != 0)
9590 adjust_stack (GEN_INT (pending_stack_adjust));
9591 pending_stack_adjust = 0;
9595 /* Expand conditional expressions. */
9597 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9598 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9602 jumpifnot (exp, label)
9606 do_jump (exp, label, NULL_RTX);
9609 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9616 do_jump (exp, NULL_RTX, label);
9619 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9620 the result is zero, or IF_TRUE_LABEL if the result is one.
9621 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9622 meaning fall through in that case.
9624 do_jump always does any pending stack adjust except when it does not
9625 actually perform a jump. An example where there is no jump
9626 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9628 This function is responsible for optimizing cases such as
9629 &&, || and comparison operators in EXP. */
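/* Illustrative call (hedged): for "if (a && b) stmt;" a caller does
   roughly jumpifnot (cond, else_label); the TRUTH_ANDIF_EXPR case
   below then jumps to else_label as soon as A is zero and evaluates B
   only when A was nonzero.  */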
9632 do_jump (exp, if_false_label, if_true_label)
9634 rtx if_false_label, if_true_label;
9636 enum tree_code code = TREE_CODE (exp);
9637 /* Some cases need to create a label to jump to
9638 in order to properly fall through.
9639 These cases set DROP_THROUGH_LABEL nonzero. */
9640 rtx drop_through_label = 0;
9644 enum machine_mode mode;
9646 #ifdef MAX_INTEGER_COMPUTATION_MODE
9647 check_max_integer_computation_mode (exp);
9658 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9664 /* This is not true with #pragma weak */
9666 /* The address of something can never be zero. */
9668 emit_jump (if_true_label);
9673 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9674 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9675 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9676 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9679 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9681 if ((TYPE_PRECISION (TREE_TYPE (exp))
9682 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9684 case NON_LVALUE_EXPR:
9685 case REFERENCE_EXPR:
9690 /* These cannot change zero->nonzero or vice versa. */
9691 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9694 case WITH_RECORD_EXPR:
9695 /* Put the object on the placeholder list, recurse through our first
9696 operand, and pop the list. */
9697 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9699 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9700 placeholder_list = TREE_CHAIN (placeholder_list);
9704 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9705 a test and can be longer if the test is eliminated. */
9707 /* Reduce to minus. */
9708 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9709 TREE_OPERAND (exp, 0),
9710 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9711 TREE_OPERAND (exp, 1))));
9712 /* Process as MINUS. */
9716 /* Nonzero iff operands of minus differ. */
9717 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9718 TREE_OPERAND (exp, 0),
9719 TREE_OPERAND (exp, 1)),
9720 NE, NE, if_false_label, if_true_label);
9724 /* If we are AND'ing with a small constant, do this comparison in the
9725 smallest type that fits. If the machine doesn't have comparisons
9726 that small, it will be converted back to the wider comparison.
9727 This helps if we are testing the sign bit of a narrower object.
9728 combine can't do this for us because it can't know whether a
9729 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9731 if (! SLOW_BYTE_ACCESS
9732 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9733 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9734 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9735 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9736 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9737 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9738 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9739 != CODE_FOR_nothing))
9741 do_jump (convert (type, exp), if_false_label, if_true_label);
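/* Concrete case (illustrative): testing "x & 0x80" with int X can be
   done as a QImode compare; without the narrowing above the AND would
   stay in SImode and the cheaper byte-compare pattern would never be
   tried.  */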
9746 case TRUTH_NOT_EXPR:
9747 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9750 case TRUTH_ANDIF_EXPR:
9751 if (if_false_label == 0)
9752 if_false_label = drop_through_label = gen_label_rtx ();
9753 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9754 start_cleanup_deferral ();
9755 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9756 end_cleanup_deferral ();
9759 case TRUTH_ORIF_EXPR:
9760 if (if_true_label == 0)
9761 if_true_label = drop_through_label = gen_label_rtx ();
9762 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9763 start_cleanup_deferral ();
9764 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9765 end_cleanup_deferral ();
9770 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9771 preserve_temp_slots (NULL_RTX);
9775 do_pending_stack_adjust ();
9776 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9782 case ARRAY_RANGE_REF:
9784 HOST_WIDE_INT bitsize, bitpos;
9786 enum machine_mode mode;
9791 /* Get description of this reference. We don't actually care
9792 about the underlying object here. */
9793 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9794 &unsignedp, &volatilep);
9796 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9797 if (! SLOW_BYTE_ACCESS
9798 && type != 0 && bitsize >= 0
9799 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9800 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9801 != CODE_FOR_nothing))
9803 do_jump (convert (type, exp), if_false_label, if_true_label);
9810 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9811 if (integer_onep (TREE_OPERAND (exp, 1))
9812 && integer_zerop (TREE_OPERAND (exp, 2)))
9813 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9815 else if (integer_zerop (TREE_OPERAND (exp, 1))
9816 && integer_onep (TREE_OPERAND (exp, 2)))
9817 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9821 rtx label1 = gen_label_rtx ();
9822 drop_through_label = gen_label_rtx ();
9824 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9826 start_cleanup_deferral ();
9827 /* Now the THEN-expression. */
9828 do_jump (TREE_OPERAND (exp, 1),
9829 if_false_label ? if_false_label : drop_through_label,
9830 if_true_label ? if_true_label : drop_through_label);
9831 /* In case the do_jump just above never jumps. */
9832 do_pending_stack_adjust ();
9833 emit_label (label1);
9835 /* Now the ELSE-expression. */
9836 do_jump (TREE_OPERAND (exp, 2),
9837 if_false_label ? if_false_label : drop_through_label,
9838 if_true_label ? if_true_label : drop_through_label);
9839 end_cleanup_deferral ();
9845 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9847 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9848 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9850 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9851 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9854 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9855 fold (build (EQ_EXPR, TREE_TYPE (exp),
9856 fold (build1 (REALPART_EXPR,
9857 TREE_TYPE (inner_type),
9859 fold (build1 (REALPART_EXPR,
9860 TREE_TYPE (inner_type),
9862 fold (build (EQ_EXPR, TREE_TYPE (exp),
9863 fold (build1 (IMAGPART_EXPR,
9864 TREE_TYPE (inner_type),
9866 fold (build1 (IMAGPART_EXPR,
9867 TREE_TYPE (inner_type),
9869 if_false_label, if_true_label);
9872 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9873 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9875 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9876 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9877 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9879 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
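/* The complex branch above decomposes, roughly,
       a == b  ==>  REALPART (a) == REALPART (b)
                    && IMAGPART (a) == IMAGPART (b)
   using TRUTH_ANDIF_EXPR so the imaginary parts are compared only when
   the real parts already matched.  */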
9885 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9887 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9888 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9890 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9891 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9894 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9895 fold (build (NE_EXPR, TREE_TYPE (exp),
9896 fold (build1 (REALPART_EXPR,
9897 TREE_TYPE (inner_type),
9899 fold (build1 (REALPART_EXPR,
9900 TREE_TYPE (inner_type),
9902 fold (build (NE_EXPR, TREE_TYPE (exp),
9903 fold (build1 (IMAGPART_EXPR,
9904 TREE_TYPE (inner_type),
9906 fold (build1 (IMAGPART_EXPR,
9907 TREE_TYPE (inner_type),
9909 if_false_label, if_true_label);
9912 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9913 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9915 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9916 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9917 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9919 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9924 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9925 if (GET_MODE_CLASS (mode) == MODE_INT
9926 && ! can_compare_p (LT, mode, ccp_jump))
9927 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9929 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9933 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9934 if (GET_MODE_CLASS (mode) == MODE_INT
9935 && ! can_compare_p (LE, mode, ccp_jump))
9936 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9938 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9942 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9943 if (GET_MODE_CLASS (mode) == MODE_INT
9944 && ! can_compare_p (GT, mode, ccp_jump))
9945 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9947 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9951 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9952 if (GET_MODE_CLASS (mode) == MODE_INT
9953 && ! can_compare_p (GE, mode, ccp_jump))
9954 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9956 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9959 case UNORDERED_EXPR:
9962 enum rtx_code cmp, rcmp;
9965 if (code == UNORDERED_EXPR)
9966 cmp = UNORDERED, rcmp = ORDERED;
9968 cmp = ORDERED, rcmp = UNORDERED;
9969 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9972 if (! can_compare_p (cmp, mode, ccp_jump)
9973 && (can_compare_p (rcmp, mode, ccp_jump)
9974 /* If the target doesn't provide either UNORDERED or ORDERED
9975 comparisons, canonicalize on UNORDERED for the library. */
9976 || rcmp == UNORDERED))
9980 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9982 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9987 enum rtx_code rcode1;
9988 enum tree_code tcode2;
10001 goto unordered_bcc;
10005 goto unordered_bcc;
10009 goto unordered_bcc;
10012 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10013 if (can_compare_p (rcode1, mode, ccp_jump))
10014 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10018 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10019 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10022 /* If the target doesn't support combined unordered
10023 compares, decompose into UNORDERED + comparison. */
10024 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10025 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10026 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10027 do_jump (exp, if_false_label, if_true_label);
10033 __builtin_expect (<test>, 0) and
10034 __builtin_expect (<test>, 1)
10036 We need to do this here, so that <test> is not converted to a SCC
10037 operation on machines that use condition code registers and COMPARE
10038 like the PowerPC, and then the jump is done based on whether the SCC
10039 operation produced a 1 or 0. */
10041 /* Check for a built-in function. */
10042 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10044 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10045 tree arglist = TREE_OPERAND (exp, 1);
10047 if (TREE_CODE (fndecl) == FUNCTION_DECL
10048 && DECL_BUILT_IN (fndecl)
10049 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10050 && arglist != NULL_TREE
10051 && TREE_CHAIN (arglist) != NULL_TREE)
10053 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10056 if (seq != NULL_RTX)
10063 /* fall through and generate the normal code. */
10067 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10069 /* This is not needed any more and causes poor code since it causes
10070 comparisons and tests from non-SI objects to have different code
10072 /* Copy to register to avoid generating bad insns by cse
10073 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10074 if (!cse_not_expected && GET_CODE (temp) == MEM)
10075 temp = copy_to_reg (temp);
10077 do_pending_stack_adjust ();
10078 /* Do any postincrements in the expression that was tested. */
10081 if (GET_CODE (temp) == CONST_INT
10082 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10083 || GET_CODE (temp) == LABEL_REF)
10085 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10087 emit_jump (target);
10089 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10090 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10091 /* Note swapping the labels gives us not-equal. */
10092 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10093 else if (GET_MODE (temp) != VOIDmode)
10094 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10095 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10096 GET_MODE (temp), NULL_RTX,
10097 if_false_label, if_true_label);
10102 if (drop_through_label)
10104 /* If do_jump produces code that might be jumped around,
10105 do any stack adjusts from that code, before the place
10106 where control merges in. */
10107 do_pending_stack_adjust ();
10108 emit_label (drop_through_label);
10112 /* Given a comparison expression EXP for values too wide to be compared
10113 with one insn, test the comparison and jump to the appropriate label.
10114 The code of EXP is ignored; we always test GT if SWAP is 0,
10115 and LT if SWAP is 1. */
10118 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10121 rtx if_false_label, if_true_label;
10123 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10124 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10125 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10126 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10128 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10131 /* Compare OP0 with OP1, word at a time, in mode MODE.
10132 UNSIGNEDP says to do unsigned comparison.
10133 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
			      if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
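  /* Worked example (added commentary): to test a DImode value X on a
     32-bit target, the code below computes

	part = x.word[0] | x.word[1];

     and then branches once on part == 0, i.e. a single IOR plus one
     compare-and-branch instead of one compare-and-branch per word.  */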
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

#if 0
  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#ifdef HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

#if 0
  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
	 case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
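/* Sketch of the set/jump/set fallback mentioned above (added
   commentary): when the store-flag insn cannot be used, the tail of
   this function emits the equivalent of

	target = 1;			(or 0 when inverting)
	if (op0 CODE op1) goto label;
	target = 0;			(or 1 when inverting)
     label:

   so TARGET ends up holding the truth value of the comparison either
   way.  */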
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return 0;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
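  /* For example (added note): X < 1 is rewritten below as X <= 0 and
     X >= 1 as X > 0; for signed operands X <= -1 likewise becomes
     X < 0 and X > -1 becomes X >= 0.  Every rewritten form compares
     against zero, which the single-bit tests that follow can then
     recognize directly.  */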
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
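  /* Worked example (added commentary): for the C expression
     (x & 0x20) != 0, BITNUM is 5 and the code below emits the
     equivalent of

	op0 = (unsigned) x >> 5;
	op0 &= 1;

     while for (x & 0x20) == 0 it additionally XORs with 1 before the
     final masking, avoiding a store-flag (scc) insn in both cases.  */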
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything at all, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
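/* For example (added note): when a casesi pattern is available, a switch
   must have at least 4 case values before expand uses a dispatch table;
   without casesi the extra bounds-check overhead raises the threshold to
   5, and smaller switches stay as compare-and-branch chains.  */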
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
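  /* Worked example (added commentary): for a switch covering case
     values 10 through 15, the lower bound 10 has already been
     subtracted, so RANGE is 5 and the test below is

	if ((unsigned) (i - 10) > 5) goto default_label;

     which fires both when i < 10 (the subtraction wraps to a huge
     unsigned value) and when i > 15, catching both sides with one
     unsigned comparison.  */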
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
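  /* Illustration (added note): the address built below is

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

     so with 4-byte vector entries, index 3 selects the entry at
     table_label + 12.  */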
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
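/* For example (added note): on a target without a hardware V2SI move,
   V2SImode is still reported as valid here provided plain SImode moves
   exist, since a V2SI object can always be handled as two SImode
   words.  */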
int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Anything other than an integer or floating-point vector mode
     makes no sense here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* If the target supports the vector mode directly, we are done.  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI; that case seems very unlikely in practice.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

#include "gt-expr.h"