/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Don't check memory usage, since the code being emitted is itself
   checking memory usage.  Used when current_function_check_memory_usage
   is true, to avoid infinite recursion.  */
static int in_check_memory_usage;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
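/* Usage sketch (illustrative only, not called in this file): a caller
   that wants to emit an insn using a value which may still be a QUEUED
   must filter it first, and must use the result immediately, before any
   intervening emit_queue.  The helper name is hypothetical.  */
#if 0
static void
example_use_queued_value (x)
     rtx x;
{
  /* Never put a possibly-QUEUED rtx directly into an insn; convert it
     to a safe equivalent first (0 = read-only access).  */
  rtx safe = protect_from_queue (x, 0);

  emit_move_insn (gen_reg_rtx (GET_MODE (safe)), safe);
}
#endif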
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
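/* Illustrative sketch (hypothetical, not called anywhere here) of the
   enqueue/flush protocol: an expander for a post-increment queues the
   increment with enqueue_insn and later flushes it with emit_queue, at
   which point QUEUED_INSN records where the increment landed.  */
#if 0
static void
example_queue_post_increment (var)
     rtx var;
{
  /* Queue "var = var + 1"; the returned QUEUED rtx stands for the
     pre-increment value of VAR.  */
  rtx queued = enqueue_insn (var,
			     gen_move_insn (var, plus_constant (var, 1)));

  /* ... expand code that must see the old value of VAR, passing
     QUEUED through protect_from_queue before using it ...  */

  emit_queue ();	/* Now the increment is actually emitted.  */
}
#endif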
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
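/* Usage sketch (illustrative only): widen a SImode value into a new
   DImode pseudo; UNSIGNEDP selects zero- versus sign-extension.  The
   helper name is hypothetical.  */
#if 0
static rtx
example_widen_si_to_di (si_val, unsignedp)
     rtx si_val;
     int unsignedp;
{
  rtx di = gen_reg_rtx (DImode);

  convert_move (di, si_val, unsignedp);
  return di;
}
#endif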
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
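/* Usage sketch (illustrative only): convert_modes is the
   value-producing counterpart of convert_move; it may return X itself,
   a narrowed lowpart, or a fresh pseudo.  The helper is hypothetical.  */
#if 0
static rtx
example_truncate_to_qi (x)
     rtx x;
{
  /* OLDMODE may be given as VOIDmode when X's own mode is nonvoid.  */
  return convert_modes (QImode, VOIDmode, x, 1);
}
#endif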
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
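/* For example, on a 32-bit target with MOVE_MAX == 4 and ALIGN wide
   enough to permit word access, L == 7 decomposes greedily into one
   SImode move, one HImode move and one QImode move, so the result is 3
   (an illustrative reading of the loop below, assuming integer modes
   of 1, 2 and 4 bytes are all available).  */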
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
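/* Usage sketch (illustrative only): copy NBYTES bytes between two
   BLKmode MEMs.  In this interface ALIGN is in bits, and the operands
   must already have been through protect_from_queue.  The helper name
   and the byte-alignment assumption are hypothetical.  */
#if 0
static void
example_copy_block (dst_mem, src_mem, nbytes)
     rtx dst_mem, src_mem;
     HOST_WIDE_INT nbytes;
{
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes),
		   BITS_PER_UNIT /* assume only byte alignment */);
}
#endif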
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
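/* Usage sketch (illustrative only): unpack a BLKmode function return
   value that arrived in registers into a stack temporary.  The helper
   name is hypothetical.  */
#if 0
static rtx
example_unpack_blkmode_result (result_reg, type)
     rtx result_reg;
     tree type;
{
  /* Passing 0 for TGTBLK makes the routine allocate the temporary.  */
  return copy_blkmode_from_reg (NULL_RTX, result_reg, type);
}
#endif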
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
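/* Usage sketch (illustrative only): a caller building CALL_INSN usage
   information marks each argument register so flow does not consider it
   dead at the call.  Register number 0 and SImode here are arbitrary.  */
#if 0
static rtx
example_mark_arg_register ()
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  return call_fusage;
}
#endif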
2282 can_store_by_pieces (len, constfun, constfundata, align)
2283 unsigned HOST_WIDE_INT len;
2284 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 unsigned HOST_WIDE_INT max_size, l;
2289 HOST_WIDE_INT offset = 0;
2290 enum machine_mode mode, tmode;
2291 enum insn_code icode;
2295 if (! MOVE_BY_PIECES_P (len, align))
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2302 /* We would first store what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2306 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2311 max_size = MOVE_MAX_PIECES + 1;
2312 while (max_size > 1)
2314 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2316 if (GET_MODE_SIZE (tmode) < max_size)
2319 if (mode == VOIDmode)
2322 icode = mov_optab->handlers[(int) mode].insn_code;
2323 if (icode != CODE_FOR_nothing
2324 && align >= GET_MODE_ALIGNMENT (mode))
2326 unsigned int size = GET_MODE_SIZE (mode);
2333 cst = (*constfun) (constfundata, offset, mode);
2334 if (!LEGITIMATE_CONSTANT_P (cst))
2344 max_size = GET_MODE_SIZE (mode);
2347 /* The code above should have handled everything. */
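/* Worked example (illustrative): with 4-byte words and LEN == 7, the mode
   walk above would check SImode for bytes 0-3, then HImode for bytes 4-5,
   then QImode for byte 6, asking CONSTFUN for the constant at each offset.
   If any required constant fails LEGITIMATE_CONSTANT_P the function
   answers 0; a mode that merely lacks a usable move pattern or sufficient
   alignment is simply skipped in favor of narrower ones.  */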
2355 /* Generate several move instructions to store LEN bytes generated by
2356 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2357 pointer which will be passed as argument in every CONSTFUN call.
2358 ALIGN is maximum alignment we can assume. */
2361 store_by_pieces (to, len, constfun, constfundata, align)
2363 unsigned HOST_WIDE_INT len;
2364 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 struct store_by_pieces data;
2370 if (! MOVE_BY_PIECES_P (len, align))
2372 to = protect_from_queue (to, 1);
2373 data.constfun = constfun;
2374 data.constfundata = constfundata;
2377 store_by_pieces_1 (&data, align);
2380 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2381 rtx with BLKmode). The caller must pass TO through protect_from_queue
2382 before calling. ALIGN is maximum alignment we can assume. */
2385 clear_by_pieces (to, len, align)
2387 unsigned HOST_WIDE_INT len;
2390 struct store_by_pieces data;
2392 data.constfun = clear_by_pieces_1;
2393 data.constfundata = NULL;
2396 store_by_pieces_1 (&data, align);
2399 /* Callback routine for clear_by_pieces.
2400 Return const0_rtx unconditionally. */
2403 clear_by_pieces_1 (data, offset, mode)
2404 PTR data ATTRIBUTE_UNUSED;
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2406 enum machine_mode mode ATTRIBUTE_UNUSED;
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). The caller must pass TO through protect_from_queue
2414 before calling. ALIGN is maximum alignment we can assume. */
2417 store_by_pieces_1 (data, align)
2418 struct store_by_pieces *data;
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2423 enum machine_mode mode = VOIDmode, tmode;
2424 enum insn_code icode;
2427 data->to_addr = to_addr;
2429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2432 data->explicit_inc_to = 0;
2434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 data->offset = data->len;
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align) > 2)
2444 /* Determine the main mode we'll be using. */
2445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2447 if (GET_MODE_SIZE (tmode) < max_size)
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2465 if ( !data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_addr_to_reg (to_addr);
2469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2471 align = MOVE_MAX * BITS_PER_UNIT;
2473 /* First store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2476 while (max_size > 1)
2478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2480 if (GET_MODE_SIZE (tmode) < max_size)
2483 if (mode == VOIDmode)
2486 icode = mov_optab->handlers[(int) mode].insn_code;
2487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2488 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2490 max_size = GET_MODE_SIZE (mode);
2493 /* The code above should have handled everything. */
2498 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2499 with move instructions for mode MODE. GENFUN is the gen_... function
2500 to make a move insn for that mode. DATA has all the other info. */
2503 store_by_pieces_2 (genfun, mode, data)
2504 rtx (*genfun) PARAMS ((rtx, ...));
2505 enum machine_mode mode;
2506 struct store_by_pieces *data;
2508 unsigned int size = GET_MODE_SIZE (mode);
2511 while (data->len >= size)
2514 data->offset -= size;
2516 if (data->autinc_to)
2518 to1 = replace_equiv_address (data->to, data->to_addr);
2519 to1 = adjust_address (to1, mode, 0);
2522 to1 = adjust_address (data->to, mode, data->offset);
2524 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2525 emit_insn (gen_add2_insn (data->to_addr,
2526 GEN_INT (-(HOST_WIDE_INT) size)));
2528 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2529 emit_insn ((*genfun) (to1, cst));
2531 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2532 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534 if (! data->reverse)
2535 data->offset += size;
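/* Illustrative trace of the autoincrement path above (assuming a target
   with HAVE_PRE_DECREMENT and a reversed store): each iteration first
   emits

     gen_add2_insn (data->to_addr, GEN_INT (-(HOST_WIDE_INT) size));

   and then the store itself, so TO_ADDR walks backwards through the
   block one MODE-sized piece at a time.  */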
2541 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2542 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2544 If we call a function that returns the length of the block, return it. */
2547 clear_storage (object, size, align)
2552 #ifdef TARGET_MEM_FUNCTIONS
2554 tree call_expr, arg_list;
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2602 rtx last = get_last_insn ();
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2617 delete_insns_since (last);
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2668 /* This was copied from except.c; I don't know if all this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
2685 memset has three arguments: the first is a void * address, the
2686 second an integer with the initialization value, and the last is a
2687 size_t count of bytes to be set. */
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
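/* A caller-side sketch (hypothetical operands, not from this file):
   zeroing a 16-byte BLKmode block whose address is known to be 32-bit
   aligned:

     clear_storage (blk, GEN_INT (16), 32);

   Small constant sizes are handled inline by clear_by_pieces; anything
   else falls through to a clrstr pattern or to the memset/bzero call
   built above.  */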
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y = force_const_mem (mode, y);
2748 /* If X or Y are memory references, verify that their addresses are valid
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2763 if (mode == BLKmode)
2766 last_insn = emit_move_insn_1 (x, y);
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the machine can
2810 push exactly, we need to use move instructions. */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2 = GET_MODE_SIZE (submode);
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2854 /* If this is a stack push, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
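/* Worked example (illustrative): on a STACK_GROWS_DOWNWARD target the
   imagpart is pushed first and the realpart second, so the realpart ends
   up at the lower address and the usual realpart-then-imagpart memory
   layout is preserved regardless of byte order, as the comment above
   notes.  */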
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
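/* Illustrative case for the packed-complex workaround above (hypothetical
   modes): a complex char value is 2 bytes, smaller than two words, so
   when the source Y is a hard register the move is bounced through a
   stack temporary in an integer mode of the same overall size, roughly:

     mem = assign_stack_temp (HImode, 2, 0);
     emit_move_insn_1 (mem, gen_rtx_SUBREG (HImode, y, 0));
     emit_move_insn_1 (x, adjust_address (mem, GET_MODE (x), 0));  */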
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3003 temp = stack_pointer_rtx;
3005 x = change_address (x, VOIDmode, temp);
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3039 if (xpart == 0 || ypart == 0)
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3044 last_insn = emit_move_insn (xpart, ypart);
3047 seq = gen_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3069 /* Pushing data onto the stack. */
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3081 push_block (size, extra, below)
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3094 temp = copy_to_mode_reg (Pmode, size);
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3101 #ifndef STACK_GROWS_DOWNWARD
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
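/* Worked example (illustrative): on a STACK_GROWS_DOWNWARD target,
   push_block (GEN_INT (32), 0, 0) anti-adjusts the stack by 32 bytes and
   returns a legitimized address 32 bytes below virtual_outgoing_args_rtx,
   i.e. the start of the freshly pushed block.  */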
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3132 get_push_address (size)
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
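/* Worked example (illustrative): with STACK_PUSH_CODE == POST_DEC the
   store happens at the old stack pointer and SP is decremented
   afterwards, so a just-pushed block of SIZE bytes begins at
   stack_pointer_rtx + SIZE, which is the PLUS built above; POST_INC
   symmetrically yields stack_pointer_rtx - SIZE.  */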
3147 #ifdef PUSH_ROUNDING
3149 /* Emit single push insn. */
3152 emit_single_push_insn (mode, x, type)
3154 enum machine_mode mode;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3165 a MEM representing the push operation to the move expander. */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3189 dest = gen_rtx_MEM (mode, dest_addr);
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3200 emit_move_insn (dest, x);
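/* Worked example (illustrative, hypothetical target): if PUSH_ROUNDING
   rounds push sizes up to a multiple of 4, pushing a 2-byte HImode value
   gives rounded_size == 4 != GET_MODE_SIZE (mode), so instead of a plain
   (pre_dec sp) address the code above builds

     (pre_modify sp (plus sp (const_int -4)))

   and the 2-byte value is stored at the freshly adjusted stack
   pointer.  */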
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3206 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3241 enum machine_mode mode;
3250 int reg_parm_stack_space;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266 /* Invert direction if stack is post-decrement. */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3272 xinner = x = protect_from_queue (x, 0);
3274 if (mode == BLKmode)
3276 /* Copy a block into the stack, entirely or partially. */
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3292 xinner = adjust_address (xinner, BLKmode, used);
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3306 && GET_CODE (size) == CONST_INT
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3350 #endif /* PUSH_ROUNDING */
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3357 /* Deduct words put into registers from the size we must copy. */
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3373 temp = push_block (size, extra, where_pad == downward);
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3406 target = gen_rtx_MEM (BLKmode, temp);
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3418 /* TEMP is the address of the block. Copy the data there. */
3419 if (GET_CODE (size) == CONST_INT
3420 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3422 move_by_pieces (target, xinner, INTVAL (size), align);
3427 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3428 enum machine_mode mode;
3430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3432 mode = GET_MODE_WIDER_MODE (mode))
3434 enum insn_code code = movstr_optab[(int) mode];
3435 insn_operand_predicate_fn pred;
3437 if (code != CODE_FOR_nothing
3438 && ((GET_CODE (size) == CONST_INT
3439 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3440 <= (GET_MODE_MASK (mode) >> 1)))
3441 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3442 && (!(pred = insn_data[(int) code].operand[0].predicate)
3443 || ((*pred) (target, BLKmode)))
3444 && (!(pred = insn_data[(int) code].operand[1].predicate)
3445 || ((*pred) (xinner, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[3].predicate)
3447 || ((*pred) (opalign, VOIDmode))))
3449 rtx op2 = convert_to_mode (mode, size, 1);
3450 rtx last = get_last_insn ();
3453 pred = insn_data[(int) code].operand[2].predicate;
3454 if (pred != 0 && ! (*pred) (op2, mode))
3455 op2 = copy_to_mode_reg (mode, op2);
3457 pat = GEN_FCN ((int) code) (target, xinner,
3465 delete_insns_since (last);
3470 if (!ACCUMULATE_OUTGOING_ARGS)
3472 /* If the source is referenced relative to the stack pointer,
3473 copy it to another register to stabilize it. We do not need
3474 to do this if we know that we won't be changing sp. */
3476 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3477 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3478 temp = copy_to_reg (temp);
3481 /* Make inhibit_defer_pop nonzero around the library call
3482 to force it to pop the bcopy-arguments right away. */
3484 #ifdef TARGET_MEM_FUNCTIONS
3485 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3486 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3487 convert_to_mode (TYPE_MODE (sizetype),
3488 size, TREE_UNSIGNED (sizetype)),
3489 TYPE_MODE (sizetype));
3491 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3492 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3493 convert_to_mode (TYPE_MODE (integer_type_node),
3495 TREE_UNSIGNED (integer_type_node)),
3496 TYPE_MODE (integer_type_node));
3501 else if (partial > 0)
3503 /* Scalar partly in registers. */
3505 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3508 /* # words of start of argument
3509 that we must make space for but need not store. */
3510 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3511 int args_offset = INTVAL (args_so_far);
3514 /* Push padding now if padding above and stack grows down,
3515 or if padding below and stack grows up.
3516 But if space already allocated, this has already been done. */
3517 if (extra && args_addr == 0
3518 && where_pad != none && where_pad != stack_direction)
3519 anti_adjust_stack (GEN_INT (extra));
3521 /* If we make space by pushing it, we might as well push
3522 the real data. Otherwise, we can leave OFFSET nonzero
3523 and leave the space uninitialized. */
3527 /* Now NOT_STACK gets the number of words that we don't need to
3528 allocate on the stack. */
3529 not_stack = partial - offset;
3531 /* If the partial register-part of the arg counts in its stack size,
3532 skip the part of stack space corresponding to the registers.
3533 Otherwise, start copying to the beginning of the stack space,
3534 by setting SKIP to 0. */
3535 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3537 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3538 x = validize_mem (force_const_mem (mode, x));
3540 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3541 SUBREGs of such registers are not allowed. */
3542 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3543 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3544 x = copy_to_reg (x);
3546 /* Loop over all the words allocated on the stack for this arg. */
3547 /* We can do it by words, because any scalar bigger than a word
3548 has a size a multiple of a word. */
3549 #ifndef PUSH_ARGS_REVERSED
3550 for (i = not_stack; i < size; i++)
3552 for (i = size - 1; i >= not_stack; i--)
3554 if (i >= not_stack + offset)
3555 emit_push_insn (operand_subword_force (x, i, mode),
3556 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3558 GEN_INT (args_offset + ((i - not_stack + skip)
3560 reg_parm_stack_space, alignment_pad);
3565 rtx target = NULL_RTX;
3568 /* Push padding now if padding above and stack grows down,
3569 or if padding below and stack grows up.
3570 But if space already allocated, this has already been done. */
3571 if (extra && args_addr == 0
3572 && where_pad != none && where_pad != stack_direction)
3573 anti_adjust_stack (GEN_INT (extra));
3575 #ifdef PUSH_ROUNDING
3576 if (args_addr == 0 && PUSH_ARGS)
3577 emit_single_push_insn (mode, x, type);
3581 if (GET_CODE (args_so_far) == CONST_INT)
3583 = memory_address (mode,
3584 plus_constant (args_addr,
3585 INTVAL (args_so_far)));
3587 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3590 dest = gen_rtx_MEM (mode, addr);
3593 set_mem_attributes (dest, type, 1);
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest, 0);
3601 emit_move_insn (dest, x);
3605 if (current_function_check_memory_usage && ! in_check_memory_usage)
3607 in_check_memory_usage = 1;
3609 target = get_push_address (GET_MODE_SIZE (mode));
3611 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3612 emit_library_call (chkr_copy_bitmap_libfunc,
3613 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3614 Pmode, XEXP (x, 0), Pmode,
3615 GEN_INT (GET_MODE_SIZE (mode)),
3616 TYPE_MODE (sizetype));
3618 emit_library_call (chkr_set_right_libfunc,
3619 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3620 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3621 TYPE_MODE (sizetype),
3622 GEN_INT (MEMORY_USE_RW),
3623 TYPE_MODE (integer_type_node));
3624 in_check_memory_usage = 0;
3629 /* If part should go in registers, copy that part
3630 into the appropriate registers. Do this now, at the end,
3631 since mem-to-mem copies above may do function calls. */
3632 if (partial > 0 && reg != 0)
3634 /* Handle calls that pass values in multiple non-contiguous locations.
3635 The Irix 6 ABI has examples of this. */
3636 if (GET_CODE (reg) == PARALLEL)
3637 emit_group_load (reg, x, -1, align); /* ??? size? */
3639 move_block_to_reg (REGNO (reg), x, partial, mode);
3642 if (extra && args_addr == 0 && where_pad == stack_direction)
3643 anti_adjust_stack (GEN_INT (extra));
3645 if (alignment_pad && args_addr == 0)
3646 anti_adjust_stack (alignment_pad);
3649 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3657 /* Only registers can be subtargets. */
3658 || GET_CODE (x) != REG
3659 /* If the register is readonly, it can't be set more than once. */
3660 || RTX_UNCHANGING_P (x)
3661 /* Don't use hard regs to avoid extending their life. */
3662 || REGNO (x) < FIRST_PSEUDO_REGISTER
3663 /* Avoid subtargets inside loops,
3664 since they hide some invariant expressions. */
3665 || preserve_subexpressions_p ())
3669 /* Expand an assignment that stores the value of FROM into TO.
3670 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3671 (This may contain a QUEUED rtx;
3672 if the value is constant, this rtx is a constant.)
3673 Otherwise, the returned value is NULL_RTX.
3675 SUGGEST_REG is no longer actually used.
3676 It used to mean, copy the value through a register
3677 and return that register, if that is possible.
3678 We now use WANT_VALUE to decide whether to do this. */
3681 expand_assignment (to, from, want_value, suggest_reg)
3684 int suggest_reg ATTRIBUTE_UNUSED;
3689 /* Don't crash if the lhs of the assignment was erroneous. */
3691 if (TREE_CODE (to) == ERROR_MARK)
3693 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3694 return want_value ? result : NULL_RTX;
3697 /* Assignment of a structure component needs special treatment
3698 if the structure component's rtx is not simply a MEM.
3699 Assignment of an array element at a constant index, and assignment of
3700 an array element in an unaligned packed structure field, have the same problem. */
3703 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3704 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3706 enum machine_mode mode1;
3707 HOST_WIDE_INT bitsize, bitpos;
3712 unsigned int alignment;
3715 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3716 &unsignedp, &volatilep, &alignment);
3718 /* If we are going to use store_bit_field and extract_bit_field,
3719 make sure to_rtx will be safe for multiple use. */
3721 if (mode1 == VOIDmode && want_value)
3722 tem = stabilize_reference (tem);
3724 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3727 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3729 if (GET_CODE (to_rtx) != MEM)
3732 if (GET_MODE (offset_rtx) != ptr_mode)
3734 #ifdef POINTERS_EXTEND_UNSIGNED
3735 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3741 /* A constant address in TO_RTX can have VOIDmode; we must not try
3742 to call force_reg for that case. Avoid that case. */
3743 if (GET_CODE (to_rtx) == MEM
3744 && GET_MODE (to_rtx) == BLKmode
3745 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3747 && (bitpos % bitsize) == 0
3748 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3749 && alignment == GET_MODE_ALIGNMENT (mode1))
3752 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3754 if (GET_CODE (XEXP (temp, 0)) == REG)
3757 to_rtx = (replace_equiv_address
3758 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3763 to_rtx = change_address (to_rtx, VOIDmode,
3764 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3765 force_reg (ptr_mode,
3771 if (GET_CODE (to_rtx) == MEM)
3773 /* When the offset is zero, to_rtx is the address of the
3774 structure we are storing into, and hence may be shared.
3775 We must make a new MEM before setting the volatile bit. */
3777 to_rtx = copy_rtx (to_rtx);
3779 MEM_VOLATILE_P (to_rtx) = 1;
3781 #if 0 /* This was turned off because, when a field is volatile
3782 in an object which is not volatile, the object may be in a register,
3783 and then we would abort over here. */
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
3793 to_rtx = copy_rtx (to_rtx);
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3798 /* Check the access. */
3799 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3804 enum machine_mode best_mode;
3806 best_mode = get_best_mode (bitsize, bitpos,
3807 TYPE_ALIGN (TREE_TYPE (tem)),
3809 if (best_mode == VOIDmode)
3812 best_mode_size = GET_MODE_BITSIZE (best_mode);
3813 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3814 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3815 size *= GET_MODE_SIZE (best_mode);
3817 /* Check the access right of the pointer. */
3818 in_check_memory_usage = 1;
3820 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3821 VOIDmode, 3, to_addr, Pmode,
3822 GEN_INT (size), TYPE_MODE (sizetype),
3823 GEN_INT (MEMORY_USE_WO),
3824 TYPE_MODE (integer_type_node));
3825 in_check_memory_usage = 0;
3828 /* If this is a varying-length object, we must get the address of
3829 the source and do an explicit block move. */
3832 unsigned int from_align;
3833 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3835 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3837 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3838 MIN (alignment, from_align));
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3853 int_size_in_bytes (TREE_TYPE (tem)),
3854 get_alias_set (to));
3856 preserve_temp_slots (result);
3860 /* If the value is meaningful, convert RESULT to the proper mode.
3861 Otherwise, return nothing. */
3862 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3863 TYPE_MODE (TREE_TYPE (from)),
3865 TREE_UNSIGNED (TREE_TYPE (to)))
3870 /* If the rhs is a function call and its value is not an aggregate,
3871 call the function before we start to compute the lhs.
3872 This is needed for correct code for cases such as
3873 val = setjmp (buf) on machines where reference to val
3874 requires loading up part of an address in a separate insn.
3876 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3877 since it might be a promoted variable where the zero- or sign-extension
3878 needs to be done. Handling this in the normal way is safe because no
3879 computation is done before the call. */
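/* For instance (illustrative C source, not from this file):

     int val;
     val = setjmp (buf);

   The call is expanded first and its value is then stored into VAL's
   DECL_RTL, so no part of the lhs address is computed before the
   call.  */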
3880 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3882 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3883 && GET_CODE (DECL_RTL (to)) == REG))
3888 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3890 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3892 /* Handle calls that return values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (to_rtx) == PARALLEL)
3895 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from),
3899 TYPE_ALIGN (TREE_TYPE (from)));
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3904 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
3907 emit_move_insn (to_rtx, value);
3909 preserve_temp_slots (to_rtx);
3912 return want_value ? to_rtx : NULL_RTX;
3915 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3916 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3920 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3921 if (GET_CODE (to_rtx) == MEM)
3922 set_mem_alias_set (to_rtx, get_alias_set (to));
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3932 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3936 TYPE_ALIGN (TREE_TYPE (from)));
3938 emit_move_insn (to_rtx, temp);
3940 preserve_temp_slots (to_rtx);
3943 return want_value ? to_rtx : NULL_RTX;
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
3956 size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3958 EXPAND_MEMORY_USE_DONT);
3960 /* Copy the rights of the bitmap. */
3961 if (current_function_check_memory_usage)
3962 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3963 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3964 XEXP (from_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype),
3966 size, TREE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3969 #ifdef TARGET_MEM_FUNCTIONS
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size, TREE_UNSIGNED (integer_type_node)),
3982 TYPE_MODE (integer_type_node));
3985 preserve_temp_slots (to_rtx);
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3998 return want_value ? result : NULL_RTX;
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to be more thorough?
4020 If WANT_VALUE is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE. */
4025 store_expr (exp, target, want_value)
4031 int dont_return_target = 0;
4032 int dont_store_target = 0;
4034 if (TREE_CODE (exp) == COMPOUND_EXPR)
4036 /* Perform the first part of the compound expression, then assign from the second part. */
4038 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4040 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4042 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4044 /* For conditional expression, get safe form of the target. Then
4045 test the condition, doing the appropriate assignment on either
4046 side. This avoids the creation of unnecessary temporaries.
4047 For non-BLKmode, it is more efficient not to do this. */
4049 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4052 target = protect_from_queue (target, 1);
4054 do_pending_stack_adjust ();
4056 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 start_cleanup_deferral ();
4058 store_expr (TREE_OPERAND (exp, 1), target, 0);
4059 end_cleanup_deferral ();
4061 emit_jump_insn (gen_jump (lab2));
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 2), target, 0);
4066 end_cleanup_deferral ();
4071 return want_value ? target : NULL_RTX;
4073 else if (queued_subexp_p (target))
4074 /* If target contains a postincrement, let's not risk
4075 using it as the place to generate the rhs. */
4077 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4079 /* Expand EXP into a new pseudo. */
4080 temp = gen_reg_rtx (GET_MODE (target));
4081 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4084 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4086 /* If target is volatile, ANSI requires accessing the value
4087 *from* the target, if it is accessed. So make that happen.
4088 In no case return the target itself. */
4089 if (! MEM_VOLATILE_P (target) && want_value)
4090 dont_return_target = 1;
4092 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4093 && GET_MODE (target) != BLKmode)
4094 /* If target is in memory and caller wants value in a register instead,
4095 arrange that. Pass TARGET as target for expand_expr so that,
4096 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4097 We know expand_expr will not use the target in that case.
4098 Don't do this if TARGET is volatile because we are supposed
4099 to write it and then read it. */
4101 temp = expand_expr (exp, target, GET_MODE (target), 0);
4102 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4104 /* If TEMP is already in the desired TARGET, only copy it from
4105 memory and don't store it there again. */
4107 || (rtx_equal_p (temp, target)
4108 && ! side_effects_p (temp) && ! side_effects_p (target)))
4109 dont_store_target = 1;
4110 temp = copy_to_reg (temp);
4112 dont_return_target = 1;
4114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4115 /* If this is a scalar in a register that is stored in a wider mode
4116 than the declared mode, compute the result into its declared mode
4117 and then convert to the wider mode. Our value is the computed expression. */
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
4133 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4137 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4138 SUBREG_PROMOTED_UNSIGNED_P (target)),
4142 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 /* If we promoted a constant, change the mode back down to match
4167 target. Otherwise, the caller might get confused by a result whose
4168 mode is larger than expected. */
4170 if (want_value && GET_MODE (temp) != GET_MODE (target)
4171 && GET_MODE (temp) != VOIDmode)
4173 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4174 SUBREG_PROMOTED_VAR_P (temp) = 1;
4175 SUBREG_PROMOTED_UNSIGNED_P (temp)
4176 = SUBREG_PROMOTED_UNSIGNED_P (target);
4179 return want_value ? temp : NULL_RTX;
4183 temp = expand_expr (exp, target, GET_MODE (target), 0);
4184 /* Return TARGET if it's a specified hardware register.
4185 If TARGET is a volatile mem ref, either return TARGET
4186 or return a reg copied *from* TARGET; ANSI requires this.
4188 Otherwise, if TEMP is not TARGET, return TEMP
4189 if it is constant (for efficiency),
4190 or if we really want the correct value. */
4191 if (!(target && GET_CODE (target) == REG
4192 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4193 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4194 && ! rtx_equal_p (temp, target)
4195 && (CONSTANT_P (temp) || want_value))
4196 dont_return_target = 1;
4199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4200 the same as that of TARGET, adjust the constant. This is needed, for
4201 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4203 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4204 && TREE_CODE (exp) != ERROR_MARK
4205 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4206 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4207 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4209 if (current_function_check_memory_usage
4210 && GET_CODE (target) == MEM
4211 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4213 in_check_memory_usage = 1;
4214 if (GET_CODE (temp) == MEM)
4215 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 XEXP (temp, 0), Pmode,
4218 expr_size (exp), TYPE_MODE (sizetype));
4220 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
4222 expr_size (exp), TYPE_MODE (sizetype),
4223 GEN_INT (MEMORY_USE_WO),
4224 TYPE_MODE (integer_type_node));
4225 in_check_memory_usage = 0;
4228 /* If value was not generated in the target, store it there.
4229 Convert the value to TARGET's type first if necessary. */
4230 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4231 one or both of them are volatile memory refs, we have to distinguish
4233 - expand_expr has used TARGET. In this case, we must not generate
4234 another copy. This can be detected by TARGET being equal according to ==.
4236 - expand_expr has not used TARGET - that means that the source just
4237 happens to have the same RTX form. Since temp will have been created
4238 by expand_expr, it will compare unequal according to == .
4239 We must generate a copy in this case, to reach the correct number
4240 of volatile memory references. */
4242 if ((! rtx_equal_p (temp, target)
4243 || (temp != target && (side_effects_p (temp)
4244 || side_effects_p (target))))
4245 && TREE_CODE (exp) != ERROR_MARK
4246 && ! dont_store_target)
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4262 convert_move (target, temp, unsignedp);
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4267 /* Handle copying a string constant into an array.
4268 The string constant may be shorter than the array.
4269 So copy just the string's actual length, and clear the rest. */
4273 /* Get the size of the data type of the string,
4274 which is actually the size of the target. */
4275 size = expr_size (exp);
4276 if (GET_CODE (size) == CONST_INT
4277 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4278 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4281 /* Compute the size of the data to copy from the string. */
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4287 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4291 /* Copy that much. */
4292 emit_block_move (target, temp, copy_size_rtx,
4293 TYPE_ALIGN (TREE_TYPE (exp)));
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4298 addr = XEXP (target, 0);
4299 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4301 if (GET_CODE (copy_size_rtx) == CONST_INT)
4303 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4304 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4306 (unsigned int) (BITS_PER_UNIT
4307 * (INTVAL (copy_size_rtx)
4308 & - INTVAL (copy_size_rtx))));
4312 addr = force_reg (ptr_mode, addr);
4313 addr = expand_binop (ptr_mode, add_optab, addr,
4314 copy_size_rtx, NULL_RTX, 0,
4317 size = expand_binop (ptr_mode, sub_optab, size,
4318 copy_size_rtx, NULL_RTX, 0,
4321 align = BITS_PER_UNIT;
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, 0, label);
4326 align = MIN (align, expr_align (copy_size));
4328 if (size != const0_rtx)
4330 rtx dest = gen_rtx_MEM (BLKmode, addr);
4332 MEM_COPY_ATTRIBUTES (dest, target);
4334 /* Be sure we can write on ADDR. */
4335 in_check_memory_usage = 1;
4336 if (current_function_check_memory_usage)
4337 emit_library_call (chkr_check_addr_libfunc,
4338 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4340 size, TYPE_MODE (sizetype),
4341 GEN_INT (MEMORY_USE_WO),
4342 TYPE_MODE (integer_type_node));
4343 in_check_memory_usage = 0;
4344 clear_storage (dest, size, align);
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target) == PARALLEL)
4354 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4355 TYPE_ALIGN (TREE_TYPE (exp)));
4356 else if (GET_MODE (temp) == BLKmode)
4357 emit_block_move (target, temp, expr_size (exp),
4358 TYPE_ALIGN (TREE_TYPE (exp)));
4360 emit_move_insn (target, temp);
4363 /* If we don't want a value, return NULL_RTX. */
4367 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4368 ??? The latter test doesn't seem to make sense. */
4369 else if (dont_return_target && GET_CODE (temp) != MEM)
4372 /* Return TARGET itself if it is a hard register. */
4373 else if (want_value && GET_MODE (target) != BLKmode
4374 && ! (GET_CODE (target) == REG
4375 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4376 return copy_to_reg (target);
4382 /* Return 1 if EXP just contains zeros. */
4390 switch (TREE_CODE (exp))
4394 case NON_LVALUE_EXPR:
4395 return is_zeros_p (TREE_OPERAND (exp, 0));
4398 return integer_zerop (exp);
4402 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4405 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4409 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4410 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4411 if (! is_zeros_p (TREE_VALUE (elt)))
4421 /* Return 1 if EXP contains mostly (3/4) zeros. */
4424 mostly_zeros_p (exp)
4427 if (TREE_CODE (exp) == CONSTRUCTOR)
4429 int elts = 0, zeros = 0;
4430 tree elt = CONSTRUCTOR_ELTS (exp);
4431 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4433 /* If there are no ranges of true bits, it is all zero. */
4434 return elt == NULL_TREE;
4436 for (; elt; elt = TREE_CHAIN (elt))
4438 /* We do not handle the case where the index is a RANGE_EXPR,
4439 so the statistic will be somewhat inaccurate.
4440 We do make a more accurate count in store_constructor itself,
4441 so since this function is only used for nested array elements,
4442 this should be close enough. */
4443 if (mostly_zeros_p (TREE_VALUE (elt)))
4448 return 4 * zeros >= 3 * elts;
4451 return is_zeros_p (exp);
4454 /* Helper function for store_constructor.
4455 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4456 TYPE is the type of the CONSTRUCTOR, not the element type.
4457 ALIGN and CLEARED are as for store_constructor.
4458 ALIAS_SET is the alias set to use for any stores.
4460 This provides a recursive shortcut back to store_constructor when it isn't
4461 necessary to go through store_field. This is so that we can pass through
4462 the cleared field to let store_constructor know that we may not have to
4463 clear a substructure if the outer structure has already been cleared. */
4466 store_constructor_field (target, bitsize, bitpos,
4467 mode, exp, type, align, cleared, alias_set)
4469 unsigned HOST_WIDE_INT bitsize;
4470 HOST_WIDE_INT bitpos;
4471 enum machine_mode mode;
4477 if (TREE_CODE (exp) == CONSTRUCTOR
4478 && bitpos % BITS_PER_UNIT == 0
4479 /* If we have a non-zero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481 generate unnecessary clear instructions anyways. */
4482 && (bitpos == 0 || GET_CODE (target) == MEM))
4486 = adjust_address (target,
4487 GET_MODE (target) == BLKmode
4489 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4490 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4493 /* Show the alignment may no longer be what it was and update the alias
4494 set, if required. */
4496 align = MIN (align, (unsigned int) bitpos & - bitpos);
4497 if (GET_CODE (target) == MEM)
4498 set_mem_alias_set (target, alias_set);
4500 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4503 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4504 int_size_in_bytes (type), alias_set);
4507 /* Store the value of constructor EXP into the rtx TARGET.
4508 TARGET is either a REG or a MEM.
4509 ALIGN is the maximum known alignment for TARGET.
4510 CLEARED is true if TARGET is known to have been zero'd.
4511 SIZE is the number of bytes of TARGET we are allowed to modify: this
4512 may not be the same as the size of EXP if we are assigning to a field
4513 which has been packed to exclude padding bits. */
4516 store_constructor (exp, target, align, cleared, size)
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4528 /* We know our target cannot conflict, since safe_from_p has been called. */
4530 /* Don't try copying piece by piece into a hard register
4531 since that is vulnerable to being clobbered by EXP.
4532 Instead, construct in a pseudo register and then copy it all. */
4533 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4535 rtx temp = gen_reg_rtx (GET_MODE (target));
4536 store_constructor (exp, temp, align, cleared, size);
4537 emit_move_insn (target, temp);
4542 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4543 || TREE_CODE (type) == QUAL_UNION_TYPE)
4547 /* Inform later passes that the whole union value is dead. */
4548 if ((TREE_CODE (type) == UNION_TYPE
4549 || TREE_CODE (type) == QUAL_UNION_TYPE)
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4554 /* If the constructor is empty, clear the union. */
4555 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4556 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4567 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4572 /* If the constructor has fewer fields than the structure
4573 or if we are initializing the structure to mostly zeros,
4574 clear the whole structure first. Don't do this if TARGET is a
4575 register whose mode size isn't equal to SIZE since clear_storage
4576 can't handle this case. */
4578 && ((list_length (CONSTRUCTOR_ELTS (exp))
4579 != fields_length (type))
4580 || mostly_zeros_p (exp))
4581 && (GET_CODE (target) != REG
4582 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4585 clear_storage (target, GEN_INT (size), align);
4590 /* Inform later passes that the old value is dead. */
4591 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4593 /* Store each element of the constructor into
4594 the corresponding field of TARGET. */
4596 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4598 tree field = TREE_PURPOSE (elt);
4599 #ifdef WORD_REGISTER_OPERATIONS
4600 tree value = TREE_VALUE (elt);
4602 enum machine_mode mode;
4603 HOST_WIDE_INT bitsize;
4604 HOST_WIDE_INT bitpos = 0;
4607 rtx to_rtx = target;
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4615 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4623 unsignedp = TREE_UNSIGNED (field);
4624 mode = DECL_MODE (field);
4625 if (DECL_BIT_FIELD (field))
4628 offset = DECL_FIELD_OFFSET (field);
4629 if (host_integerp (offset, 0)
4630 && host_integerp (bit_position (field), 0))
4632 bitpos = int_bit_position (field);
4636 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4642 if (contains_placeholder_p (offset))
4643 offset = build (WITH_RECORD_EXPR, sizetype,
4644 offset, make_tree (TREE_TYPE (exp), target));
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4650 if (GET_MODE (offset_rtx) != ptr_mode)
4652 #ifdef POINTERS_EXTEND_UNSIGNED
4653 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4660 = change_address (to_rtx, VOIDmode,
4661 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4662 force_reg (ptr_mode,
4664 align = DECL_OFFSET_ALIGN (field);
4667 if (TREE_READONLY (field))
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4688 tree type = TREE_TYPE (value);
4689 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4691 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4692 value = convert (type, value);
4694 if (BYTES_BIG_ENDIAN)
4696 = fold (build (LSHIFT_EXPR, type, value,
4697 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4698 bitsize = BITS_PER_WORD;
4702 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4703 TREE_VALUE (elt), type, align, cleared,
4704 (DECL_NONADDRESSABLE_P (field)
4705 && GET_CODE (to_rtx) == MEM)
4706 ? MEM_ALIAS_SET (to_rtx)
4707 : get_alias_set (TREE_TYPE (field)));
4710 else if (TREE_CODE (type) == ARRAY_TYPE)
4715 tree domain = TYPE_DOMAIN (type);
4716 tree elttype = TREE_TYPE (type);
4717 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4718 && TYPE_MAX_VALUE (domain)
4719 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4720 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4721 HOST_WIDE_INT minelt = 0;
4722 HOST_WIDE_INT maxelt = 0;
4724 /* If we have constant bounds for the range of the type, get them. */
4727 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4728 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4731 /* If the constructor has fewer elements than the array,
4732 clear the whole array first. Similarly if this is
4733 static constructor of a non-BLKmode object. */
4734 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4738 HOST_WIDE_INT count = 0, zero_count = 0;
4739 need_to_clear = ! const_bounds_p;
4741 /* This loop is a more accurate version of the loop in
4742 mostly_zeros_p (it handles RANGE_EXPR in an index).
4743 It is also needed to check for missing elements. */
4744 for (elt = CONSTRUCTOR_ELTS (exp);
4745 elt != NULL_TREE && ! need_to_clear;
4746 elt = TREE_CHAIN (elt))
4748 tree index = TREE_PURPOSE (elt);
4749 HOST_WIDE_INT this_node_count;
4751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4753 tree lo_index = TREE_OPERAND (index, 0);
4754 tree hi_index = TREE_OPERAND (index, 1);
4756 if (! host_integerp (lo_index, 1)
4757 || ! host_integerp (hi_index, 1))
4763 this_node_count = (tree_low_cst (hi_index, 1)
4764 - tree_low_cst (lo_index, 1) + 1);
4767 this_node_count = 1;
4769 count += this_node_count;
4770 if (mostly_zeros_p (TREE_VALUE (elt)))
4771 zero_count += this_node_count;
4774 /* Clear the entire array first if there are any missing elements,
4775 or if the incidence of zero elements is >= 75%. */
4777 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4781 if (need_to_clear && size > 0)
4784 clear_storage (target, GEN_INT (size), align);
4787 else if (REG_P (target))
4788 /* Inform later passes that the old value is dead. */
4789 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4791 /* Store each element of the constructor into
4792 the corresponding element of TARGET, determined
4793 by counting the elements. */
4794 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4796 elt = TREE_CHAIN (elt), i++)
4798 enum machine_mode mode;
4799 HOST_WIDE_INT bitsize;
4800 HOST_WIDE_INT bitpos;
4802 tree value = TREE_VALUE (elt);
4803 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4804 tree index = TREE_PURPOSE (elt);
4805 rtx xtarget = target;
4807 if (cleared && is_zeros_p (value))
4810 unsignedp = TREE_UNSIGNED (elttype);
4811 mode = TYPE_MODE (elttype);
4812 if (mode == BLKmode)
4813 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4814 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4817 bitsize = GET_MODE_BITSIZE (mode);
4819 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4821 tree lo_index = TREE_OPERAND (index, 0);
4822 tree hi_index = TREE_OPERAND (index, 1);
4823 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4824 struct nesting *loop;
4825 HOST_WIDE_INT lo, hi, count;
4828 /* If the range is constant and "small", unroll the loop. */
4830 && host_integerp (lo_index, 0)
4831 && host_integerp (hi_index, 0)
4832 && (lo = tree_low_cst (lo_index, 0),
4833 hi = tree_low_cst (hi_index, 0),
4834 count = hi - lo + 1,
4835 (GET_CODE (target) != MEM
4837 || (host_integerp (TYPE_SIZE (elttype), 1)
4838 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4841 lo -= minelt; hi -= minelt;
4842 for (; lo <= hi; lo++)
4844 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4845 store_constructor_field
4846 (target, bitsize, bitpos, mode, value, type, align,
4848 TYPE_NONALIASED_COMPONENT (type)
4849 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4854 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4855 loop_top = gen_label_rtx ();
4856 loop_end = gen_label_rtx ();
4858 unsignedp = TREE_UNSIGNED (domain);
4860 index = build_decl (VAR_DECL, NULL_TREE, domain);
4863 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4865 SET_DECL_RTL (index, index_r);
4866 if (TREE_CODE (value) == SAVE_EXPR
4867 && SAVE_EXPR_RTL (value) == 0)
4869 /* Make sure value gets expanded once before the
4871 expand_expr (value, const0_rtx, VOIDmode, 0);
4874 store_expr (lo_index, index_r, 0);
4875 loop = expand_start_loop (0);
4877 /* Assign value to element index. */
4879 = convert (ssizetype,
4880 fold (build (MINUS_EXPR, TREE_TYPE (index),
4881 index, TYPE_MIN_VALUE (domain))));
4882 position = size_binop (MULT_EXPR, position,
4884 TYPE_SIZE_UNIT (elttype)));
4886 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4887 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4888 xtarget = change_address (target, mode, addr);
4889 if (TREE_CODE (value) == CONSTRUCTOR)
4890 store_constructor (value, xtarget, align, cleared,
4891 bitsize / BITS_PER_UNIT);
4893 store_expr (value, xtarget, 0);
4895 expand_exit_loop_if_false (loop,
4896 build (LT_EXPR, integer_type_node,
4899 expand_increment (build (PREINCREMENT_EXPR,
4901 index, integer_one_node), 0, 0);
4903 emit_label (loop_end);
4906 else if ((index != 0 && ! host_integerp (index, 0))
4907 || ! host_integerp (TYPE_SIZE (elttype), 1))
4913 index = ssize_int (1);
4916 index = convert (ssizetype,
4917 fold (build (MINUS_EXPR, index,
4918 TYPE_MIN_VALUE (domain))));
4920 position = size_binop (MULT_EXPR, index,
4922 TYPE_SIZE_UNIT (elttype)));
4923 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4924 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4925 xtarget = change_address (target, mode, addr);
4926 store_expr (value, xtarget, 0);
4931 bitpos = ((tree_low_cst (index, 0) - minelt)
4932 * tree_low_cst (TYPE_SIZE (elttype), 1));
4934 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4936 store_constructor_field (target, bitsize, bitpos, mode, value,
4937 type, align, cleared,
4938 TYPE_NONALIASED_COMPONENT (type)
4939 && GET_CODE (target) == MEM
4940 ? MEM_ALIAS_SET (target) :
4941 get_alias_set (elttype));
4947 /* Set constructor assignments. */
4948 else if (TREE_CODE (type) == SET_TYPE)
4950 tree elt = CONSTRUCTOR_ELTS (exp);
4951 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4952 tree domain = TYPE_DOMAIN (type);
4953 tree domain_min, domain_max, bitlength;
4955 /* The default implementation strategy is to extract the constant
4956 parts of the constructor, use that to initialize the target,
4957 and then "or" in whatever non-constant ranges we need in addition.
4959 If a large set is all zero or all ones, it is
4960 probably better to set it using memset (if available) or bzero.
4961 Also, if a large set has just a single range, it may also be
4962 better to first clear all the first clear the set (using
4963 bzero/memset), and set the bits we want. */
4965 /* Check for all zeros. */
4966 if (elt == NULL_TREE && size > 0)
4969 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4973 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4974 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4975 bitlength = size_binop (PLUS_EXPR,
4976 size_diffop (domain_max, domain_min),
4979 nbits = tree_low_cst (bitlength, 1);
4981 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4982 are "complicated" (more than one range), initialize (the
4983 constant parts) by copying from a constant. */
4984 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4985 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4987 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4988 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4989 char *bit_buffer = (char *) alloca (nbits);
4990 HOST_WIDE_INT word = 0;
4991 unsigned int bit_pos = 0;
4992 unsigned int ibit = 0;
4993 unsigned int offset = 0; /* In bytes from beginning of set. */
4995 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4998 if (bit_buffer[ibit])
5000 if (BYTES_BIG_ENDIAN)
5001 word |= (1 << (set_word_size - 1 - bit_pos));
5003 word |= 1 << bit_pos;
5007 if (bit_pos >= set_word_size || ibit == nbits)
5009 if (word != 0 || ! cleared)
5011 rtx datum = GEN_INT (word);
5014 /* The assumption here is that it is safe to use
5015 XEXP if the set is multi-word, but not if
5016 it's single-word. */
5017 if (GET_CODE (target) == MEM)
5018 to_rtx = adjust_address (target, mode, offset);
5019 else if (offset == 0)
5023 emit_move_insn (to_rtx, datum);
5030 offset += set_word_size / BITS_PER_UNIT;
5035 /* Don't bother clearing storage if the set is all ones. */
5036 if (TREE_CHAIN (elt) != NULL_TREE
5037 || (TREE_PURPOSE (elt) == NULL_TREE
5039 : ( ! host_integerp (TREE_VALUE (elt), 0)
5040 || ! host_integerp (TREE_PURPOSE (elt), 0)
5041 || (tree_low_cst (TREE_VALUE (elt), 0)
5042 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5043 != (HOST_WIDE_INT) nbits))))
5044 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5046 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5048 /* Start of range of element or NULL. */
5049 tree startbit = TREE_PURPOSE (elt);
5050 /* End of range of element, or element value. */
5051 tree endbit = TREE_VALUE (elt);
5052 #ifdef TARGET_MEM_FUNCTIONS
5053 HOST_WIDE_INT startb, endb;
5055 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5057 bitlength_rtx = expand_expr (bitlength,
5058 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5060 /* Handle non-range tuple element like [ expr ]. */
5061 if (startbit == NULL_TREE)
5063 startbit = save_expr (endbit);
5067 startbit = convert (sizetype, startbit);
5068 endbit = convert (sizetype, endbit);
5069 if (! integer_zerop (domain_min))
5071 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5072 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5074 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5075 EXPAND_CONST_ADDRESS);
5076 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5077 EXPAND_CONST_ADDRESS);
5083 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5086 emit_move_insn (targetx, target);
5089 else if (GET_CODE (target) == MEM)
5094 #ifdef TARGET_MEM_FUNCTIONS
5095 /* Optimization: If startbit and endbit are
5096 constants divisible by BITS_PER_UNIT,
5097 call memset instead. */
5098 if (TREE_CODE (startbit) == INTEGER_CST
5099 && TREE_CODE (endbit) == INTEGER_CST
5100 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5101 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5103 emit_library_call (memset_libfunc, LCT_NORMAL,
5105 plus_constant (XEXP (targetx, 0),
5106 startb / BITS_PER_UNIT),
5108 constm1_rtx, TYPE_MODE (integer_type_node),
5109 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5110 TYPE_MODE (sizetype));
5114 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5115 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5116 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5117 startbit_rtx, TYPE_MODE (sizetype),
5118 endbit_rtx, TYPE_MODE (sizetype));
5121 emit_move_insn (target, targetx);
5129 /* Store the value of EXP (an expression tree)
5130 into a subfield of TARGET which has mode MODE and occupies
5131 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5132 If MODE is VOIDmode, it means that we are storing into a bit-field.
5134 If VALUE_MODE is VOIDmode, return nothing in particular.
5135 UNSIGNEDP is not used in this case.
5137 Otherwise, return an rtx for the value stored. This rtx
5138 has mode VALUE_MODE if that is convenient to do.
5139 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5141 ALIGN is the alignment that TARGET is known to have.
5142 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5144 ALIAS_SET is the alias set for the destination. This value will
5145 (in general) be different from that for TARGET, since TARGET is a
5146 reference to the containing structure. */
5149 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5150 unsignedp, align, total_size, alias_set)
5152 HOST_WIDE_INT bitsize;
5153 HOST_WIDE_INT bitpos;
5154 enum machine_mode mode;
5156 enum machine_mode value_mode;
5159 HOST_WIDE_INT total_size;
5162 HOST_WIDE_INT width_mask = 0;
5164 if (TREE_CODE (exp) == ERROR_MARK)
5167 /* If we have nothing to store, do nothing unless the expression has
5170 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5172 if (bitsize < HOST_BITS_PER_WIDE_INT)
5173 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5175 /* If we are storing into an unaligned field of an aligned union that is
5176 in a register, we may have the mode of TARGET being an integer mode but
5177 MODE == BLKmode. In that case, get an aligned object whose size and
5178 alignment are the same as TARGET and store TARGET into it (we can avoid
5179 the store if the field being stored is the entire width of TARGET). Then
5180 call ourselves recursively to store the field into a BLKmode version of
5181 that object. Finally, load from the object into TARGET. This is not
5182 very efficient in general, but should only be slightly more expensive
5183 than the otherwise-required unaligned accesses. Perhaps this can be
5184 cleaned up later. */
5187 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5191 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5194 rtx blk_object = copy_rtx (object);
5196 PUT_MODE (blk_object, BLKmode);
5198 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5199 emit_move_insn (object, target);
5201 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5202 align, total_size, alias_set);
5204 /* Even though we aren't returning target, we need to
5205 give it the updated value. */
5206 emit_move_insn (target, object);
5211 if (GET_CODE (target) == CONCAT)
5213 /* We're storing into a struct containing a single __complex. */
5217 return store_expr (exp, target, 0);
5220 /* If the structure is in a register or if the component
5221 is a bit field, we cannot use addressing to access it.
5222 Use bit-field techniques or SUBREG to store in it. */
5224 if (mode == VOIDmode
5225 || (mode != BLKmode && ! direct_store[(int) mode]
5226 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5227 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5228 || GET_CODE (target) == REG
5229 || GET_CODE (target) == SUBREG
5230 /* If the field isn't aligned enough to store as an ordinary memref,
5231 store it as a bit field. */
5232 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5233 && (align < GET_MODE_ALIGNMENT (mode)
5234 || bitpos % GET_MODE_ALIGNMENT (mode)))
5235 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5236 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5237 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5238 /* If the RHS and field are a constant size and the size of the
5239 RHS isn't the same size as the bitfield, we must use bitfield
5242 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5243 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5245 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5247 /* If BITSIZE is narrower than the size of the type of EXP
5248 we will be narrowing TEMP. Normally, what's wanted are the
5249 low-order bits. However, if EXP's type is a record and this is
5250 big-endian machine, we want the upper BITSIZE bits. */
5251 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5252 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5253 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5254 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5255 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5259 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5261 if (mode != VOIDmode && mode != BLKmode
5262 && mode != TYPE_MODE (TREE_TYPE (exp)))
5263 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5265 /* If the modes of TARGET and TEMP are both BLKmode, both
5266 must be in memory and BITPOS must be aligned on a byte
5267 boundary. If so, we simply do a block copy. */
5268 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5270 unsigned int exp_align = expr_align (exp);
5272 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5273 || bitpos % BITS_PER_UNIT != 0)
5276 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5278 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5279 align = MIN (exp_align, align);
5281 /* Find an alignment that is consistent with the bit position. */
5282 while ((bitpos % align) != 0)
5285 emit_block_move (target, temp,
5286 bitsize == -1 ? expr_size (exp)
5287 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5291 return value_mode == VOIDmode ? const0_rtx : target;
5294 /* Store the value in the bitfield. */
5295 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5296 if (value_mode != VOIDmode)
5298 /* The caller wants an rtx for the value. */
5299 /* If possible, avoid refetching from the bitfield itself. */
5301 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5304 enum machine_mode tmode;
5307 return expand_and (temp,
5311 GET_MODE (temp) == VOIDmode
5313 : GET_MODE (temp))), NULL_RTX);
5314 tmode = GET_MODE (temp);
5315 if (tmode == VOIDmode)
5317 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5318 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5319 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5321 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5322 NULL_RTX, value_mode, 0, align,
5329 rtx addr = XEXP (target, 0);
5332 /* If a value is wanted, it must be the lhs;
5333 so make the address stable for multiple use. */
5335 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5336 && ! CONSTANT_ADDRESS_P (addr)
5337 /* A frame-pointer reference is already stable. */
5338 && ! (GET_CODE (addr) == PLUS
5339 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5340 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5341 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5342 target = replace_equiv_address (target, copy_to_reg (addr));
5344 /* Now build a reference to just the desired component. */
5346 to_rtx = copy_rtx (adjust_address (target, mode,
5347 bitpos / BITS_PER_UNIT));
5349 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5350 /* If the address of the structure varies, then it might be on
5351 the stack. And, stack slots may be shared across scopes.
5352 So, two different structures, of different types, can end up
5353 at the same location. We will give the structures alias set
5354 zero; here we must be careful not to give non-zero alias sets
5356 set_mem_alias_set (to_rtx,
5357 rtx_varies_p (addr, /*for_alias=*/0)
5360 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5364 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
5374 We set *PALIGNMENT to the alignment of the address that will be
5375 computed. This is the alignment of the thing we return if *POFFSET
5376 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
5378 If any of the extraction expressions is volatile,
5379 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5381 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5382 is a mode that can be used to access the field. In that case, *PBITSIZE
5385 If the field describes a variable-sized object, *PMODE is set to
5386 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5387 this case, but the address of the object can be found. */
5390 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5391 punsignedp, pvolatilep, palignment)
5393 HOST_WIDE_INT *pbitsize;
5394 HOST_WIDE_INT *pbitpos;
5396 enum machine_mode *pmode;
5399 unsigned int *palignment;
5402 enum machine_mode mode = VOIDmode;
5403 tree offset = size_zero_node;
5404 tree bit_offset = bitsize_zero_node;
5405 unsigned int alignment = BIGGEST_ALIGNMENT;
5406 tree placeholder_ptr = 0;
5409 /* First get the mode, signedness, and size. We do this from just the
5410 outermost expression. */
5411 if (TREE_CODE (exp) == COMPONENT_REF)
5413 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5414 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5415 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5417 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5419 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5421 size_tree = TREE_OPERAND (exp, 1);
5422 *punsignedp = TREE_UNSIGNED (exp);
5426 mode = TYPE_MODE (TREE_TYPE (exp));
5427 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5429 if (mode == BLKmode)
5430 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5432 *pbitsize = GET_MODE_BITSIZE (mode);
5437 if (! host_integerp (size_tree, 1))
5438 mode = BLKmode, *pbitsize = -1;
5440 *pbitsize = tree_low_cst (size_tree, 1);
5443 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5444 and find the ultimate containing object. */
5447 if (TREE_CODE (exp) == BIT_FIELD_REF)
5448 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5449 else if (TREE_CODE (exp) == COMPONENT_REF)
5451 tree field = TREE_OPERAND (exp, 1);
5452 tree this_offset = DECL_FIELD_OFFSET (field);
5454 /* If this field hasn't been filled in yet, don't go
5455 past it. This should only happen when folding expressions
5456 made during type construction. */
5457 if (this_offset == 0)
5459 else if (! TREE_CONSTANT (this_offset)
5460 && contains_placeholder_p (this_offset))
5461 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5463 offset = size_binop (PLUS_EXPR, offset, this_offset);
5464 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5465 DECL_FIELD_BIT_OFFSET (field));
5467 if (! host_integerp (offset, 0))
5468 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5471 else if (TREE_CODE (exp) == ARRAY_REF
5472 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5474 tree index = TREE_OPERAND (exp, 1);
5475 tree array = TREE_OPERAND (exp, 0);
5476 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5477 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5478 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5480 /* We assume all arrays have sizes that are a multiple of a byte.
5481 First subtract the lower bound, if any, in the type of the
5482 index, then convert to sizetype and multiply by the size of the
5484 if (low_bound != 0 && ! integer_zerop (low_bound))
5485 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5488 /* If the index has a self-referential type, pass it to a
5489 WITH_RECORD_EXPR; if the component size is, pass our
5490 component to one. */
5491 if (! TREE_CONSTANT (index)
5492 && contains_placeholder_p (index))
5493 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5494 if (! TREE_CONSTANT (unit_size)
5495 && contains_placeholder_p (unit_size))
5496 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5498 offset = size_binop (PLUS_EXPR, offset,
5499 size_binop (MULT_EXPR,
5500 convert (sizetype, index),
5504 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5506 exp = find_placeholder (exp, &placeholder_ptr);
5509 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5510 && ! ((TREE_CODE (exp) == NOP_EXPR
5511 || TREE_CODE (exp) == CONVERT_EXPR)
5512 && (TYPE_MODE (TREE_TYPE (exp))
5513 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5516 /* If any reference in the chain is volatile, the effect is volatile. */
5517 if (TREE_THIS_VOLATILE (exp))
5520 /* If the offset is non-constant already, then we can't assume any
5521 alignment more than the alignment here. */
5522 if (! TREE_CONSTANT (offset))
5523 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5525 exp = TREE_OPERAND (exp, 0);
5529 alignment = MIN (alignment, DECL_ALIGN (exp));
5530 else if (TREE_TYPE (exp) != 0)
5531 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset, 0)
5536 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5538 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5539 && host_integerp (tem, 0))
5540 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5542 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5545 *palignment = alignment;
5549 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5551 static enum memory_use_mode
5552 get_memory_usage_from_modifier (modifier)
5553 enum expand_modifier modifier;
5559 return MEMORY_USE_RO;
5561 case EXPAND_MEMORY_USE_WO:
5562 return MEMORY_USE_WO;
5564 case EXPAND_MEMORY_USE_RW:
5565 return MEMORY_USE_RW;
5567 case EXPAND_MEMORY_USE_DONT:
5568 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5569 MEMORY_USE_DONT, because they are modifiers to a call of
5570 expand_expr in the ADDR_EXPR case of expand_expr. */
5571 case EXPAND_CONST_ADDRESS:
5572 case EXPAND_INITIALIZER:
5573 return MEMORY_USE_DONT;
5574 case EXPAND_MEMORY_USE_BAD:
5580 /* Given an rtx VALUE that may contain additions and multiplications, return
5581 an equivalent value that just refers to a register, memory, or constant.
5582 This is done by generating instructions to perform the arithmetic and
5583 returning a pseudo-register containing the value.
5585 The returned value may be a REG, SUBREG, MEM or constant. */
5588 force_operand (value, target)
5592 /* Use a temporary to force order of execution of calls to
5596 /* Use subtarget as the target for operand 0 of a binary operation. */
5597 rtx subtarget = get_subtarget (target);
5599 /* Check for a PIC address load. */
5601 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5602 && XEXP (value, 0) == pic_offset_table_rtx
5603 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5604 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5605 || GET_CODE (XEXP (value, 1)) == CONST))
5608 subtarget = gen_reg_rtx (GET_MODE (value));
5609 emit_move_insn (subtarget, value);
5613 if (GET_CODE (value) == PLUS)
5614 binoptab = add_optab;
5615 else if (GET_CODE (value) == MINUS)
5616 binoptab = sub_optab;
5617 else if (GET_CODE (value) == MULT)
5619 op2 = XEXP (value, 1);
5620 if (!CONSTANT_P (op2)
5621 && !(GET_CODE (op2) == REG && op2 != subtarget))
5623 tmp = force_operand (XEXP (value, 0), subtarget);
5624 return expand_mult (GET_MODE (value), tmp,
5625 force_operand (op2, NULL_RTX),
5631 op2 = XEXP (value, 1);
5632 if (!CONSTANT_P (op2)
5633 && !(GET_CODE (op2) == REG && op2 != subtarget))
5635 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5637 binoptab = add_optab;
5638 op2 = negate_rtx (GET_MODE (value), op2);
5641 /* Check for an addition with OP2 a constant integer and our first
5642 operand a PLUS of a virtual register and something else. In that
5643 case, we want to emit the sum of the virtual register and the
5644 constant first and then add the other value. This allows virtual
5645 register instantiation to simply modify the constant rather than
5646 creating another one around this addition. */
5647 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5648 && GET_CODE (XEXP (value, 0)) == PLUS
5649 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5650 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5651 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5653 rtx temp = expand_binop (GET_MODE (value), binoptab,
5654 XEXP (XEXP (value, 0), 0), op2,
5655 subtarget, 0, OPTAB_LIB_WIDEN);
5656 return expand_binop (GET_MODE (value), binoptab, temp,
5657 force_operand (XEXP (XEXP (value, 0), 1), 0),
5658 target, 0, OPTAB_LIB_WIDEN);
5661 tmp = force_operand (XEXP (value, 0), subtarget);
5662 return expand_binop (GET_MODE (value), binoptab, tmp,
5663 force_operand (op2, NULL_RTX),
5664 target, 0, OPTAB_LIB_WIDEN);
5665 /* We give UNSIGNEDP = 0 to expand_binop
5666 because the only operations we are expanding here are signed ones. */
5671 /* Subroutine of expand_expr: return nonzero iff there is no way that
5672 EXP can reference X, which is being modified. TOP_P is nonzero if this
5673 call is going to be used to determine whether we need a temporary
5674 for EXP, as opposed to a recursive call to this function.
5676 It is always safe for this routine to return zero since it merely
5677 searches for optimization opportunities. */
5680 safe_from_p (x, exp, top_p)
5687 static tree save_expr_list;
5690 /* If EXP has varying size, we MUST use a target since we currently
5691 have no way of allocating temporaries of variable size
5692 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5693 So we assume here that something at a higher level has prevented a
5694 clash. This is somewhat bogus, but the best we can do. Only
5695 do this when X is BLKmode and when we are at the top level. */
5696 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5697 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5698 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5699 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5700 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5702 && GET_MODE (x) == BLKmode)
5703 /* If X is in the outgoing argument area, it is always safe. */
5704 || (GET_CODE (x) == MEM
5705 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5706 || (GET_CODE (XEXP (x, 0)) == PLUS
5707 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5710 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5711 find the underlying pseudo. */
5712 if (GET_CODE (x) == SUBREG)
5715 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5719 /* A SAVE_EXPR might appear many times in the expression passed to the
5720 top-level safe_from_p call, and if it has a complex subexpression,
5721 examining it multiple times could result in a combinatorial explosion.
5722 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5723 with optimization took about 28 minutes to compile -- even though it was
5724 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5725 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5726 we have processed. Note that the only test of top_p was above. */
5735 rtn = safe_from_p (x, exp, 0);
5737 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5738 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5743 /* Now look at our tree code and possibly recurse. */
5744 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5747 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5754 if (TREE_CODE (exp) == TREE_LIST)
5755 return ((TREE_VALUE (exp) == 0
5756 || safe_from_p (x, TREE_VALUE (exp), 0))
5757 && (TREE_CHAIN (exp) == 0
5758 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5759 else if (TREE_CODE (exp) == ERROR_MARK)
5760 return 1; /* An already-visited SAVE_EXPR? */
5765 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5769 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5770 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5774 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5775 the expression. If it is set, we conflict iff we are that rtx or
5776 both are in memory. Otherwise, we check all operands of the
5777 expression recursively. */
5779 switch (TREE_CODE (exp))
5782 return (staticp (TREE_OPERAND (exp, 0))
5783 || TREE_STATIC (exp)
5784 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5787 if (GET_CODE (x) == MEM
5788 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5789 get_alias_set (exp)))
5794 /* Assume that the call will clobber all hard registers and
5796 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5797 || GET_CODE (x) == MEM)
5802 /* If a sequence exists, we would have to scan every instruction
5803 in the sequence to see if it was safe. This is probably not
5805 if (RTL_EXPR_SEQUENCE (exp))
5808 exp_rtl = RTL_EXPR_RTL (exp);
5811 case WITH_CLEANUP_EXPR:
5812 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5815 case CLEANUP_POINT_EXPR:
5816 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5819 exp_rtl = SAVE_EXPR_RTL (exp);
5823 /* If we've already scanned this, don't do it again. Otherwise,
5824 show we've scanned it and record for clearing the flag if we're
5826 if (TREE_PRIVATE (exp))
5829 TREE_PRIVATE (exp) = 1;
5830 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5832 TREE_PRIVATE (exp) = 0;
5836 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5840 /* The only operand we look at is operand 1. The rest aren't
5841 part of the expression. */
5842 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5844 case METHOD_CALL_EXPR:
5845 /* This takes an rtx argument, but shouldn't appear here. */
5852 /* If we have an rtx, we do not need to scan our operands. */
5856 nops = first_rtl_op (TREE_CODE (exp));
5857 for (i = 0; i < nops; i++)
5858 if (TREE_OPERAND (exp, i) != 0
5859 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5862 /* If this is a language-specific tree code, it may require
5863 special handling. */
5864 if ((unsigned int) TREE_CODE (exp)
5865 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5867 && !(*lang_safe_from_p) (x, exp))
5871 /* If we have an rtl, find any enclosed object. Then see if we conflict
5875 if (GET_CODE (exp_rtl) == SUBREG)
5877 exp_rtl = SUBREG_REG (exp_rtl);
5878 if (GET_CODE (exp_rtl) == REG
5879 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5883 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5884 are memory and they conflict. */
5885 return ! (rtx_equal_p (x, exp_rtl)
5886 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5887 && true_dependence (exp_rtl, GET_MODE (x), x,
5888 rtx_addr_varies_p)));
5891 /* If we reach here, it is safe. */
5895 /* Subroutine of expand_expr: return rtx if EXP is a
5896 variable or parameter; else return 0. */
5903 switch (TREE_CODE (exp))
5907 return DECL_RTL (exp);
5913 #ifdef MAX_INTEGER_COMPUTATION_MODE
5916 check_max_integer_computation_mode (exp)
5919 enum tree_code code;
5920 enum machine_mode mode;
5922 /* Strip any NOPs that don't change the mode. */
5924 code = TREE_CODE (exp);
5926 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5927 if (code == NOP_EXPR
5928 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5931 /* First check the type of the overall operation. We need only look at
5932 unary, binary and relational operations. */
5933 if (TREE_CODE_CLASS (code) == '1'
5934 || TREE_CODE_CLASS (code) == '2'
5935 || TREE_CODE_CLASS (code) == '<')
5937 mode = TYPE_MODE (TREE_TYPE (exp));
5938 if (GET_MODE_CLASS (mode) == MODE_INT
5939 && mode > MAX_INTEGER_COMPUTATION_MODE)
5940 internal_error ("unsupported wide integer operation");
5943 /* Check operand of a unary op. */
5944 if (TREE_CODE_CLASS (code) == '1')
5946 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5947 if (GET_MODE_CLASS (mode) == MODE_INT
5948 && mode > MAX_INTEGER_COMPUTATION_MODE)
5949 internal_error ("unsupported wide integer operation");
5952 /* Check operands of a binary/comparison op. */
5953 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5955 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5956 if (GET_MODE_CLASS (mode) == MODE_INT
5957 && mode > MAX_INTEGER_COMPUTATION_MODE)
5958 internal_error ("unsupported wide integer operation");
5960 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5961 if (GET_MODE_CLASS (mode) == MODE_INT
5962 && mode > MAX_INTEGER_COMPUTATION_MODE)
5963 internal_error ("unsupported wide integer operation");
5968 /* Return an object on the placeholder list that matches EXP, a
5969 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5970 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5971 tree.def. If no such object is found, abort. If PLIST is nonzero, it is
5972 a location which initially points to a starting location in the
5973 placeholder list (zero means start of the list) and where a pointer into
5974 the placeholder list at which the object is found is placed. */
5977 find_placeholder (exp, plist)
5981 tree type = TREE_TYPE (exp);
5982 tree placeholder_expr;
5984 for (placeholder_expr
5985 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5986 placeholder_expr != 0;
5987 placeholder_expr = TREE_CHAIN (placeholder_expr))
5989 tree need_type = TYPE_MAIN_VARIANT (type);
5992 /* Find the outermost reference that is of the type we want. If none,
5993 see if any object has a type that is a pointer to the type we
5995 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5996 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5997 || TREE_CODE (elt) == COND_EXPR)
5998 ? TREE_OPERAND (elt, 1)
5999 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6000 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6001 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6002 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6003 ? TREE_OPERAND (elt, 0) : 0))
6004 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6007 *plist = placeholder_expr;
6011 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6013 = ((TREE_CODE (elt) == COMPOUND_EXPR
6014 || TREE_CODE (elt) == COND_EXPR)
6015 ? TREE_OPERAND (elt, 1)
6016 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6017 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6018 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6019 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6020 ? TREE_OPERAND (elt, 0) : 0))
6021 if (POINTER_TYPE_P (TREE_TYPE (elt))
6022 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6026 *plist = placeholder_expr;
6027 return build1 (INDIRECT_REF, need_type, elt);
6034 /* expand_expr: generate code for computing expression EXP.
6035 An rtx for the computed value is returned. The value is never null.
6036 In the case of a void EXP, const0_rtx is returned.
6038 The value may be stored in TARGET if TARGET is nonzero.
6039 TARGET is just a suggestion; callers must assume that
6040 the rtx returned may not be the same as TARGET.
6042 If TARGET is CONST0_RTX, it means that the value will be ignored.
6044 If TMODE is not VOIDmode, it suggests generating the
6045 result in mode TMODE. But this is done only when convenient.
6046 Otherwise, TMODE is ignored and the value generated in its natural mode.
6047 TMODE is just a suggestion; callers must assume that
6048 the rtx returned may not have mode TMODE.
6050 Note that TARGET may have neither TMODE nor MODE. In that case, it
6051 probably will not be used.
6053 If MODIFIER is EXPAND_SUM then when EXP is an addition
6054 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6055 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6056 products as above, or REG or MEM, or constant.
6057 Ordinarily in such cases we would output mul or add instructions
6058 and then return a pseudo reg containing the sum.
6060 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6061 it also marks a label as absolutely required (it can't be dead).
6062 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6063 This is used for outputting expressions used in initializers.
6065 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6066 with a constant address even if that address is not normally legitimate.
6067 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6070 expand_expr (exp, target, tmode, modifier)
6073 enum machine_mode tmode;
6074 enum expand_modifier modifier;
6077 tree type = TREE_TYPE (exp);
6078 int unsignedp = TREE_UNSIGNED (type);
6079 enum machine_mode mode;
6080 enum tree_code code = TREE_CODE (exp);
6082 rtx subtarget, original_target;
6085 /* Used by check-memory-usage to make modifier read only. */
6086 enum expand_modifier ro_modifier;
6088 /* Handle ERROR_MARK before anybody tries to access its type. */
6089 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6091 op0 = CONST0_RTX (tmode);
6097 mode = TYPE_MODE (type);
6098 /* Use subtarget as the target for operand 0 of a binary operation. */
6099 subtarget = get_subtarget (target);
6100 original_target = target;
6101 ignore = (target == const0_rtx
6102 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6103 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6104 || code == COND_EXPR)
6105 && TREE_CODE (type) == VOID_TYPE));
6107 /* Make a read-only version of the modifier. */
6108 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6109 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6110 ro_modifier = modifier;
6112 ro_modifier = EXPAND_NORMAL;
6114 /* If we are going to ignore this result, we need only do something
6115 if there is a side-effect somewhere in the expression. If there
6116 is, short-circuit the most common cases here. Note that we must
6117 not call expand_expr with anything but const0_rtx in case this
6118 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6122 if (! TREE_SIDE_EFFECTS (exp))
6125 /* Ensure we reference a volatile object even if value is ignored, but
6126 don't do this if all we are doing is taking its address. */
6127 if (TREE_THIS_VOLATILE (exp)
6128 && TREE_CODE (exp) != FUNCTION_DECL
6129 && mode != VOIDmode && mode != BLKmode
6130 && modifier != EXPAND_CONST_ADDRESS)
6132 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6133 if (GET_CODE (temp) == MEM)
6134 temp = copy_to_reg (temp);
6138 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6139 || code == INDIRECT_REF || code == BUFFER_REF)
6140 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6141 VOIDmode, ro_modifier);
6142 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6143 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6145 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6147 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6151 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6152 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6153 /* If the second operand has no side effects, just evaluate
6155 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6156 VOIDmode, ro_modifier);
6157 else if (code == BIT_FIELD_REF)
6159 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6161 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6163 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6171 #ifdef MAX_INTEGER_COMPUTATION_MODE
6172 /* Only check stuff here if the mode we want is different from the mode
6173 of the expression; if it's the same, check_max_integer_computiation_mode
6174 will handle it. Do we really need to check this stuff at all? */
6177 && GET_MODE (target) != mode
6178 && TREE_CODE (exp) != INTEGER_CST
6179 && TREE_CODE (exp) != PARM_DECL
6180 && TREE_CODE (exp) != ARRAY_REF
6181 && TREE_CODE (exp) != ARRAY_RANGE_REF
6182 && TREE_CODE (exp) != COMPONENT_REF
6183 && TREE_CODE (exp) != BIT_FIELD_REF
6184 && TREE_CODE (exp) != INDIRECT_REF
6185 && TREE_CODE (exp) != CALL_EXPR
6186 && TREE_CODE (exp) != VAR_DECL
6187 && TREE_CODE (exp) != RTL_EXPR)
6189 enum machine_mode mode = GET_MODE (target);
6191 if (GET_MODE_CLASS (mode) == MODE_INT
6192 && mode > MAX_INTEGER_COMPUTATION_MODE)
6193 internal_error ("unsupported wide integer operation");
6197 && TREE_CODE (exp) != INTEGER_CST
6198 && TREE_CODE (exp) != PARM_DECL
6199 && TREE_CODE (exp) != ARRAY_REF
6200 && TREE_CODE (exp) != ARRAY_RANGE_REF
6201 && TREE_CODE (exp) != COMPONENT_REF
6202 && TREE_CODE (exp) != BIT_FIELD_REF
6203 && TREE_CODE (exp) != INDIRECT_REF
6204 && TREE_CODE (exp) != VAR_DECL
6205 && TREE_CODE (exp) != CALL_EXPR
6206 && TREE_CODE (exp) != RTL_EXPR
6207 && GET_MODE_CLASS (tmode) == MODE_INT
6208 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6209 internal_error ("unsupported wide integer operation");
6211 check_max_integer_computation_mode (exp);
6214 /* If will do cse, generate all results into pseudo registers
6215 since 1) that allows cse to find more things
6216 and 2) otherwise cse could produce an insn the machine
6219 if (! cse_not_expected && mode != BLKmode && target
6220 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6227 tree function = decl_function_context (exp);
6228 /* Handle using a label in a containing function. */
6229 if (function != current_function_decl
6230 && function != inline_function_decl && function != 0)
6232 struct function *p = find_function_data (function);
6233 p->expr->x_forced_labels
6234 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6235 p->expr->x_forced_labels);
6239 if (modifier == EXPAND_INITIALIZER)
6240 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6245 temp = gen_rtx_MEM (FUNCTION_MODE,
6246 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6247 if (function != current_function_decl
6248 && function != inline_function_decl && function != 0)
6249 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
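/* Editor's illustration (assumption, GNU C): a label address such as
   `&&lab' taken inside a nested function, where LAB belongs to the
   containing function, reaches the path above; the LABEL_REF is marked
   nonlocal so the label stays alive across the function boundary.  */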
6254 if (DECL_RTL (exp) == 0)
6256 error_with_decl (exp, "prior parameter's size depends on `%s'");
6257 return CONST0_RTX (mode);
6260 /* ... fall through ... */
6263 /* If a static var's type was incomplete when the decl was written,
6264 but the type is complete now, lay out the decl now. */
6265 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6266 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6268 layout_decl (exp, 0);
6269 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6272 /* Although static-storage variables start off initialized, according to
6273 ANSI C, a memcpy could overwrite them with uninitialized values. So
6274 we check them too. This also lets us check for read-only variables
6275 accessed via a non-const declaration, in case it won't be detected
6276 any other way (e.g., in an embedded system or OS kernel without
6277 memory protection).
6279 Aggregates are not checked here; they're handled elsewhere. */
6280 if (cfun && current_function_check_memory_usage
6282 && GET_CODE (DECL_RTL (exp)) == MEM
6283 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6285 enum memory_use_mode memory_usage;
6286 memory_usage = get_memory_usage_from_modifier (modifier);
6288 in_check_memory_usage = 1;
6289 if (memory_usage != MEMORY_USE_DONT)
6290 emit_library_call (chkr_check_addr_libfunc,
6291 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6292 XEXP (DECL_RTL (exp), 0), Pmode,
6293 GEN_INT (int_size_in_bytes (type)),
6294 TYPE_MODE (sizetype),
6295 GEN_INT (memory_usage),
6296 TYPE_MODE (integer_type_node));
6297 in_check_memory_usage = 0;
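/* Editor's illustration (a sketch, not the literal emitted RTL): under
   -fcheck-memory-usage each scalar access to V is preceded by a call of
   roughly the form

       chkr_check_addr (&v, sizeof (v), <memory_use_mode>);

   and IN_CHECK_MEMORY_USAGE keeps that checking code from being
   instrumented in turn.  */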
6300 /* ... fall through ... */
6304 if (DECL_RTL (exp) == 0)
6307 /* Ensure the variable is marked as used even if it doesn't go through
6308 a parser. If it hasn't been used yet, write out an external
6309 definition. */
6310 if (! TREE_USED (exp))
6312 assemble_external (exp);
6313 TREE_USED (exp) = 1;
6316 /* Show we haven't gotten RTL for this yet. */
6319 /* Handle variables inherited from containing functions. */
6320 context = decl_function_context (exp);
6322 /* We treat inline_function_decl as an alias for the current function
6323 because that is the inline function whose vars, types, etc.
6324 are being merged into the current function.
6325 See expand_inline_function. */
6327 if (context != 0 && context != current_function_decl
6328 && context != inline_function_decl
6329 /* If var is static, we don't need a static chain to access it. */
6330 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6331 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6335 /* Mark as non-local and addressable. */
6336 DECL_NONLOCAL (exp) = 1;
6337 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6338 abort ();
6339 mark_addressable (exp);
6340 if (GET_CODE (DECL_RTL (exp)) != MEM)
6341 abort ();
6342 addr = XEXP (DECL_RTL (exp), 0);
6343 if (GET_CODE (addr) == MEM)
6344 addr
6345 = replace_equiv_address (addr,
6346 fix_lexical_addr (XEXP (addr, 0), exp));
6347 else
6348 addr = fix_lexical_addr (addr, exp);
6350 temp = replace_equiv_address (DECL_RTL (exp), addr);
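/* Editor's illustration (GNU C nested functions):

       int outer (void)
       {
         int x = 1;
         int inner (void) { return x; }
         return inner ();
       }

   Expanding X inside INNER takes the path above: its address is rewritten
   by fix_lexical_addr so the access reaches OUTER's frame through the
   static chain.  */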
6353 /* This is the case of an array whose size is to be determined
6354 from its initializer, while the initializer is still being parsed.
6357 else if (GET_CODE (DECL_RTL (exp)) == MEM
6358 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6359 temp = validize_mem (DECL_RTL (exp));
6361 /* If DECL_RTL is memory, we are in the normal case and either
6362 the address is not valid or it is not a register and -fforce-addr
6363 is specified, get the address into a register. */
6365 else if (GET_CODE (DECL_RTL (exp)) == MEM
6366 && modifier != EXPAND_CONST_ADDRESS
6367 && modifier != EXPAND_SUM
6368 && modifier != EXPAND_INITIALIZER
6369 && (! memory_address_p (DECL_MODE (exp),
6370 XEXP (DECL_RTL (exp), 0))
6372 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6373 temp = replace_equiv_address (DECL_RTL (exp),
6374 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6376 /* If we got something, return it. But first, set the alignment
6377 if the address is a register. */
6380 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6381 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6386 /* If the mode of DECL_RTL does not match that of the decl, it
6387 must be a promoted value. We return a SUBREG of the wanted mode,
6388 but mark it so that we know that it was already extended. */
6390 if (GET_CODE (DECL_RTL (exp)) == REG
6391 && GET_MODE (DECL_RTL (exp)) != mode)
6393 /* Get the signedness used for this variable. Ensure we get the
6394 same mode we got when the variable was declared. */
6395 if (GET_MODE (DECL_RTL (exp))
6396 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6397 abort ();
6399 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6400 SUBREG_PROMOTED_VAR_P (temp) = 1;
6401 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
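/* Editor's illustration: on a target whose PROMOTE_MODE widens QImode
   locals to SImode, a `signed char c' lives in an SImode pseudo.  Asking
   for its QImode value here yields a QImode SUBREG of that pseudo with
   SUBREG_PROMOTED_VAR_P set, letting later code skip re-extensions that
   the promotion already performed.  */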
6405 return DECL_RTL (exp);
6408 return immed_double_const (TREE_INT_CST_LOW (exp),
6409 TREE_INT_CST_HIGH (exp), mode);
6412 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6413 EXPAND_MEMORY_USE_BAD);
6416 /* If optimized, generate immediate CONST_DOUBLE
6417 which will be turned into memory by reload if necessary.
6419 We used to force a register so that loop.c could see it. But
6420 this does not allow gen_* patterns to perform optimizations with
6421 the constants. It also produces two insns in cases like "x = 1.0;".
6422 On most machines, floating-point constants are not permitted in
6423 many insns, so we'd end up copying it to a register in any case.
6425 Now, we do the copying in expand_binop, if appropriate. */
6426 return immed_real_const (exp);
6430 if (! TREE_CST_RTL (exp))
6431 output_constant_def (exp, 1);
6433 /* TREE_CST_RTL probably contains a constant address.
6434 On RISC machines where a constant address isn't valid,
6435 make some insns to get that address into a register. */
6436 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6437 && modifier != EXPAND_CONST_ADDRESS
6438 && modifier != EXPAND_INITIALIZER
6439 && modifier != EXPAND_SUM
6440 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6442 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6443 return replace_equiv_address (TREE_CST_RTL (exp),
6444 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6445 return TREE_CST_RTL (exp);
6447 case EXPR_WITH_FILE_LOCATION:
6450 const char *saved_input_filename = input_filename;
6451 int saved_lineno = lineno;
6452 input_filename = EXPR_WFL_FILENAME (exp);
6453 lineno = EXPR_WFL_LINENO (exp);
6454 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6455 emit_line_note (input_filename, lineno);
6456 /* Possibly avoid switching back and forth here. */
6457 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6458 input_filename = saved_input_filename;
6459 lineno = saved_lineno;
6464 context = decl_function_context (exp);
6466 /* If this SAVE_EXPR was at global context, assume we are an
6467 initialization function and move it into our context. */
6469 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6471 /* We treat inline_function_decl as an alias for the current function
6472 because that is the inline function whose vars, types, etc.
6473 are being merged into the current function.
6474 See expand_inline_function. */
6475 if (context == current_function_decl || context == inline_function_decl)
6478 /* If this is non-local, handle it. */
6481 /* The following call just exists to abort if the context is
6482 not of a containing function. */
6483 find_function_data (context);
6485 temp = SAVE_EXPR_RTL (exp);
6486 if (temp && GET_CODE (temp) == REG)
6488 put_var_into_stack (exp);
6489 temp = SAVE_EXPR_RTL (exp);
6491 if (temp == 0 || GET_CODE (temp) != MEM)
6492 abort ();
6493 temp
6494 replace_equiv_address (temp,
6495 fix_lexical_addr (XEXP (temp, 0), exp));
6497 if (SAVE_EXPR_RTL (exp) == 0)
6498 {
6499 if (mode == VOIDmode)
6500 temp = const0_rtx;
6501 else
6502 temp = assign_temp (build_qualified_type (type,
6503 (TYPE_QUALS (type)
6504 | TYPE_QUAL_CONST)),
6505 3, 0, 0);
6507 SAVE_EXPR_RTL (exp) = temp;
6508 if (!optimize && GET_CODE (temp) == REG)
6509 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6512 /* If the mode of TEMP does not match that of the expression, it
6513 must be a promoted value. We pass store_expr a SUBREG of the
6514 wanted mode but mark it so that we know that it was already
6515 extended. Note that `unsignedp' was modified above in
6516 this case. */
6518 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6520 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6521 SUBREG_PROMOTED_VAR_P (temp) = 1;
6522 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6525 if (temp == const0_rtx)
6526 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6527 EXPAND_MEMORY_USE_BAD);
6528 else
6529 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6531 TREE_USED (exp) = 1;
6534 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6535 must be a promoted value. We return a SUBREG of the wanted mode,
6536 but mark it so that we know that it was already extended. */
6538 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6539 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6541 /* Compute the signedness and make the proper SUBREG. */
6542 promote_mode (type, mode, &unsignedp, 0);
6543 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6544 SUBREG_PROMOTED_VAR_P (temp) = 1;
6545 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6549 return SAVE_EXPR_RTL (exp);
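/* Editor's illustration: a SAVE_EXPR guarantees single evaluation of an
   expression referenced more than once, e.g. the call in a front-end
   expansion of something like `MAX (f (), 0)'.  The first expansion
   above computes and stores the value; every later expansion simply
   returns SAVE_EXPR_RTL.  */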
6554 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6555 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6559 case PLACEHOLDER_EXPR:
6561 tree old_list = placeholder_list;
6562 tree placeholder_expr = 0;
6564 exp = find_placeholder (exp, &placeholder_expr);
6565 placeholder_list = TREE_CHAIN (placeholder_expr);
6566 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6567 placeholder_list = old_list;
6571 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6574 case WITH_RECORD_EXPR:
6575 /* Put the object on the placeholder list, expand our first operand,
6576 and pop the list. */
6577 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6579 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6580 tmode, ro_modifier);
6581 placeholder_list = TREE_CHAIN (placeholder_list);
6585 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6586 expand_goto (TREE_OPERAND (exp, 0));
6588 expand_computed_goto (TREE_OPERAND (exp, 0));
6592 expand_exit_loop_if_false (NULL,
6593 invert_truthvalue (TREE_OPERAND (exp, 0)));
6596 case LABELED_BLOCK_EXPR:
6597 if (LABELED_BLOCK_BODY (exp))
6598 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6599 /* Should perhaps use expand_label, but this is simpler and safer. */
6600 do_pending_stack_adjust ();
6601 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6604 case EXIT_BLOCK_EXPR:
6605 if (EXIT_BLOCK_RETURN (exp))
6606 sorry ("returned value in block_exit_expr");
6607 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6612 expand_start_loop (1);
6613 expand_expr_stmt (TREE_OPERAND (exp, 0));
6621 tree vars = TREE_OPERAND (exp, 0);
6622 int vars_need_expansion = 0;
6624 /* Need to open a binding contour here because
6625 if there are any cleanups they must be contained here. */
6626 expand_start_bindings (2);
6628 /* Mark the corresponding BLOCK for output in its proper place. */
6629 if (TREE_OPERAND (exp, 2) != 0
6630 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6631 insert_block (TREE_OPERAND (exp, 2));
6633 /* If VARS have not yet been expanded, expand them now. */
6634 while (vars)
6635 {
6636 if (!DECL_RTL_SET_P (vars))
6637 {
6638 vars_need_expansion = 1;
6639 expand_decl (vars);
6640 }
6641 expand_decl_init (vars);
6642 vars = TREE_CHAIN (vars);
6643 }
6645 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6647 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6653 if (RTL_EXPR_SEQUENCE (exp))
6655 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6657 emit_insns (RTL_EXPR_SEQUENCE (exp));
6658 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6660 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6661 free_temps_for_rtl_expr (exp);
6662 return RTL_EXPR_RTL (exp);
6665 /* If we don't need the result, just ensure we evaluate any
6666 subexpressions. */
6667 if (ignore)
6668 {
6669 tree elt;
6670 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6671 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6672 EXPAND_MEMORY_USE_BAD);
6676 /* All elts simple constants => refer to a constant in memory. But
6677 if this is a non-BLKmode mode, let it store a field at a time
6678 since that should make a CONST_INT or CONST_DOUBLE when we
6679 fold. Likewise, if we have a target we can use, it is best to
6680 store directly into the target unless the type is large enough
6681 that memcpy will be used. If we are making an initializer and
6682 all operands are constant, put it in memory as well. */
6683 else if ((TREE_STATIC (exp)
6684 && ((mode == BLKmode
6685 && ! (target != 0 && safe_from_p (target, exp, 1)))
6686 || TREE_ADDRESSABLE (exp)
6687 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6688 && (! MOVE_BY_PIECES_P
6689 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6691 && ! mostly_zeros_p (exp))))
6692 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6694 rtx constructor = output_constant_def (exp, 1);
6696 if (modifier != EXPAND_CONST_ADDRESS
6697 && modifier != EXPAND_INITIALIZER
6698 && modifier != EXPAND_SUM)
6699 constructor = validize_mem (constructor);
6705 /* Handle calls that pass values in multiple non-contiguous
6706 locations. The Irix 6 ABI has examples of this. */
6707 if (target == 0 || ! safe_from_p (target, exp, 1)
6708 || GET_CODE (target) == PARALLEL)
6710 = assign_temp (build_qualified_type (type,
6712 | (TREE_READONLY (exp)
6713 * TYPE_QUAL_CONST))),
6714 TREE_ADDRESSABLE (exp), 1, 1);
6716 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6717 int_size_in_bytes (TREE_TYPE (exp)));
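/* Editor's illustration: for an initializer such as

       struct S s = { 0, 0, 5 };

   the constant-and-static case above emits the whole object into memory,
   while the store_constructor path may clear the object first (cheap when
   mostly_zeros_p holds) and then store only the nonzero fields.  */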
6723 tree exp1 = TREE_OPERAND (exp, 0);
6725 tree string = string_constant (exp1, &index);
6727 /* Try to optimize reads from const strings. */
6729 && TREE_CODE (string) == STRING_CST
6730 && TREE_CODE (index) == INTEGER_CST
6731 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6732 && GET_MODE_CLASS (mode) == MODE_INT
6733 && GET_MODE_SIZE (mode) == 1
6734 && modifier != EXPAND_MEMORY_USE_WO)
6736 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6738 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6739 op0 = memory_address (mode, op0);
6741 if (cfun && current_function_check_memory_usage
6742 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6744 enum memory_use_mode memory_usage;
6745 memory_usage = get_memory_usage_from_modifier (modifier);
6747 if (memory_usage != MEMORY_USE_DONT)
6749 in_check_memory_usage = 1;
6750 emit_library_call (chkr_check_addr_libfunc,
6751 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6752 Pmode, GEN_INT (int_size_in_bytes (type)),
6753 TYPE_MODE (sizetype),
6754 GEN_INT (memory_usage),
6755 TYPE_MODE (integer_type_node));
6756 in_check_memory_usage = 0;
6760 temp = gen_rtx_MEM (mode, op0);
6761 set_mem_attributes (temp, exp, 0);
6763 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6764 here, because, in C and C++, the fact that a location is accessed
6765 through a pointer to const does not mean that the value there can
6766 never change. Languages where it can never change should
6767 also set TREE_STATIC. */
6768 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6770 /* If we are writing to this object and its type is a record with
6771 readonly fields, we must mark it as readonly so it will
6772 conflict with readonly references to those fields. */
6773 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6774 RTX_UNCHANGING_P (temp) = 1;
6780 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6784 tree array = TREE_OPERAND (exp, 0);
6785 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6786 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6787 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6790 /* Optimize the special-case of a zero lower bound.
6792 We convert the low_bound to sizetype to avoid some problems
6793 with constant folding. (E.g. suppose the lower bound is 1,
6794 and its mode is QI. Without the conversion, (ARRAY
6795 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6796 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6798 if (! integer_zerop (low_bound))
6799 index = size_diffop (index, convert (sizetype, low_bound));
6801 /* Fold an expression like: "foo"[2].
6802 This is not done in fold so it won't happen inside &.
6803 Don't fold if this is for wide characters since it's too
6804 difficult to do correctly and this is a very rare case. */
6806 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6807 && TREE_CODE (array) == STRING_CST
6808 && TREE_CODE (index) == INTEGER_CST
6809 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6810 && GET_MODE_CLASS (mode) == MODE_INT
6811 && GET_MODE_SIZE (mode) == 1)
6813 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6815 /* If this is a constant index into a constant array,
6816 just get the value from the array. Handle both the cases when
6817 we have an explicit constructor and when our operand is a variable
6818 that was declared const. */
6820 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6821 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6822 && TREE_CODE (index) == INTEGER_CST
6823 && 0 > compare_tree_int (index,
6824 list_length (CONSTRUCTOR_ELTS
6825 (TREE_OPERAND (exp, 0)))))
6829 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6830 i = TREE_INT_CST_LOW (index);
6831 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6832 ;
6834 if (elem)
6835 return expand_expr (fold (TREE_VALUE (elem)), target,
6836 tmode, ro_modifier);
6839 else if (optimize >= 1
6840 && modifier != EXPAND_CONST_ADDRESS
6841 && modifier != EXPAND_INITIALIZER
6842 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6843 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6844 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6846 if (TREE_CODE (index) == INTEGER_CST)
6848 tree init = DECL_INITIAL (array);
6850 if (TREE_CODE (init) == CONSTRUCTOR)
6854 for (elem = CONSTRUCTOR_ELTS (init);
6855 (elem
6856 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6857 elem = TREE_CHAIN (elem))
6858 ;
6860 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6861 return expand_expr (fold (TREE_VALUE (elem)), target,
6862 tmode, ro_modifier);
6864 else if (TREE_CODE (init) == STRING_CST
6865 && 0 > compare_tree_int (index,
6866 TREE_STRING_LENGTH (init)))
6868 tree type = TREE_TYPE (TREE_TYPE (init));
6869 enum machine_mode mode = TYPE_MODE (type);
6871 if (GET_MODE_CLASS (mode) == MODE_INT
6872 && GET_MODE_SIZE (mode) == 1)
6874 (TREE_STRING_POINTER
6875 (init)[TREE_INT_CST_LOW (index)]));
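/* Editor's illustration: given

       static const int tbl[] = { 10, 20, 30 };

   expanding `tbl[1]' at -O1 or above returns the constant 20 straight
   out of DECL_INITIAL via the scan above, with no memory load.  */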
6884 case ARRAY_RANGE_REF:
6885 /* If the operand is a CONSTRUCTOR, we can just extract the
6886 appropriate field if it is present. Don't do this if we have
6887 already written the data since we want to refer to that copy
6888 and varasm.c assumes that's what we'll do. */
6889 if (code == COMPONENT_REF
6890 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6891 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6895 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6896 elt = TREE_CHAIN (elt))
6897 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6898 /* We can normally use the value of the field in the
6899 CONSTRUCTOR. However, if this is a bitfield in
6900 an integral mode that we can fit in a HOST_WIDE_INT,
6901 we must mask only the number of bits in the bitfield,
6902 since this is done implicitly by the constructor. If
6903 the bitfield does not meet either of those conditions,
6904 we can't do this optimization. */
6905 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6906 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6908 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6909 <= HOST_BITS_PER_WIDE_INT))))
6911 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6912 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6914 HOST_WIDE_INT bitsize
6915 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6917 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6919 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6920 op0 = expand_and (op0, op1, target);
6924 enum machine_mode imode
6925 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6927 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6930 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6932 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6942 enum machine_mode mode1;
6943 HOST_WIDE_INT bitsize, bitpos;
6946 unsigned int alignment;
6947 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6948 &mode1, &unsignedp, &volatilep,
6952 /* If we got back the original object, something is wrong. Perhaps
6953 we are evaluating an expression too early. In any event, don't
6954 infinitely recurse. */
6958 /* If TEM's type is a union of variable size, pass TARGET to the inner
6959 computation, since it will need a temporary and TARGET is known
6960 to suffice. This occurs in unchecked conversion in Ada. */
6964 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6965 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6967 ? target : NULL_RTX),
6969 (modifier == EXPAND_INITIALIZER
6970 || modifier == EXPAND_CONST_ADDRESS)
6971 ? modifier : EXPAND_NORMAL);
6973 /* If this is a constant, put it into a register if it is a
6974 legitimate constant and OFFSET is 0 and memory if it isn't. */
6975 if (CONSTANT_P (op0))
6977 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6978 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6980 op0 = force_reg (mode, op0);
6982 op0 = validize_mem (force_const_mem (mode, op0));
6987 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6989 /* If this object is in a register, put it into memory.
6990 This case can't occur in C, but can in Ada if we have
6991 unchecked conversion of an expression from a scalar type to
6992 an array or record type. */
6993 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6994 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6996 /* If the operand is a SAVE_EXPR, we can deal with this by
6997 forcing the SAVE_EXPR into memory. */
6998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7000 put_var_into_stack (TREE_OPERAND (exp, 0));
7001 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7006 = build_qualified_type (TREE_TYPE (tem),
7007 (TYPE_QUALS (TREE_TYPE (tem))
7008 | TYPE_QUAL_CONST));
7009 rtx memloc = assign_temp (nt, 1, 1, 1);
7011 mark_temp_addr_taken (memloc);
7012 emit_move_insn (memloc, op0);
7017 if (GET_CODE (op0) != MEM)
7020 if (GET_MODE (offset_rtx) != ptr_mode)
7022 #ifdef POINTERS_EXTEND_UNSIGNED
7023 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7025 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7029 /* A constant address in OP0 can have VOIDmode, we must not try
7030 to call force_reg for that case. Avoid that case. */
7031 if (GET_CODE (op0) == MEM
7032 && GET_MODE (op0) == BLKmode
7033 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7035 && (bitpos % bitsize) == 0
7036 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7037 && alignment == GET_MODE_ALIGNMENT (mode1))
7039 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7041 if (GET_CODE (XEXP (temp, 0)) == REG)
7044 op0 = (replace_equiv_address
7046 force_reg (GET_MODE (XEXP (temp, 0)),
7051 op0 = change_address (op0, VOIDmode,
7052 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7053 force_reg (ptr_mode,
7057 /* Don't forget about volatility even if this is a bitfield. */
7058 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7060 if (op0 == orig_op0)
7061 op0 = copy_rtx (op0);
7063 MEM_VOLATILE_P (op0) = 1;
7066 /* Check the access. */
7067 if (cfun != 0 && current_function_check_memory_usage
7068 && GET_CODE (op0) == MEM)
7070 enum memory_use_mode memory_usage;
7071 memory_usage = get_memory_usage_from_modifier (modifier);
7073 if (memory_usage != MEMORY_USE_DONT)
7078 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7079 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7081 /* Check the access right of the pointer. */
7082 in_check_memory_usage = 1;
7083 if (size > BITS_PER_UNIT)
7084 emit_library_call (chkr_check_addr_libfunc,
7085 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7086 Pmode, GEN_INT (size / BITS_PER_UNIT),
7087 TYPE_MODE (sizetype),
7088 GEN_INT (memory_usage),
7089 TYPE_MODE (integer_type_node));
7090 in_check_memory_usage = 0;
7094 /* In cases where an aligned union has an unaligned object
7095 as a field, we might be extracting a BLKmode value from
7096 an integer-mode (e.g., SImode) object. Handle this case
7097 by doing the extract into an object as wide as the field
7098 (which we know to be the width of a basic mode), then
7099 storing into memory, and changing the mode to BLKmode. */
7100 if (mode1 == VOIDmode
7101 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7102 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7103 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7104 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7105 && modifier != EXPAND_CONST_ADDRESS
7106 && modifier != EXPAND_INITIALIZER)
7107 /* If the field isn't aligned enough to fetch as a memref,
7108 fetch it as a bit field. */
7109 || (mode1 != BLKmode
7110 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7111 && ((TYPE_ALIGN (TREE_TYPE (tem))
7112 < GET_MODE_ALIGNMENT (mode))
7113 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7114 /* If the type and the field are a constant size and the
7115 size of the type isn't the same size as the bitfield,
7116 we must use bitfield operations. */
7118 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7120 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7123 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7124 && (TYPE_ALIGN (type) > alignment
7125 || bitpos % TYPE_ALIGN (type) != 0)))
7127 enum machine_mode ext_mode = mode;
7129 if (ext_mode == BLKmode
7130 && ! (target != 0 && GET_CODE (op0) == MEM
7131 && GET_CODE (target) == MEM
7132 && bitpos % BITS_PER_UNIT == 0))
7133 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7135 if (ext_mode == BLKmode)
7137 /* In this case, BITPOS must start at a byte boundary and
7138 TARGET, if specified, must be a MEM. */
7139 if (GET_CODE (op0) != MEM
7140 || (target != 0 && GET_CODE (target) != MEM)
7141 || bitpos % BITS_PER_UNIT != 0)
7144 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7146 target = assign_temp (type, 0, 1, 1);
7148 emit_block_move (target, op0,
7149 bitsize == -1 ? expr_size (exp)
7150 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7157 op0 = validize_mem (op0);
7159 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7160 mark_reg_pointer (XEXP (op0, 0), alignment);
7162 op0 = extract_bit_field (op0, bitsize, bitpos,
7163 unsignedp, target, ext_mode, ext_mode,
7165 int_size_in_bytes (TREE_TYPE (tem)));
7167 /* If the result is a record type and BITSIZE is narrower than
7168 the mode of OP0, an integral mode, and this is a big endian
7169 machine, we must put the field into the high-order bits. */
7170 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7171 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7172 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7173 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7174 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7178 if (mode == BLKmode)
7180 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7182 rtx new = assign_temp (nt, 0, 1, 1);
7184 emit_move_insn (new, op0);
7185 op0 = copy_rtx (new);
7186 PUT_MODE (op0, BLKmode);
7192 /* If the result is BLKmode, use that to access the object
7193 now as well. */
7194 if (mode == BLKmode)
7197 /* Get a reference to just this component. */
7198 if (modifier == EXPAND_CONST_ADDRESS
7199 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7200 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7202 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7204 if (op0 == orig_op0)
7205 op0 = copy_rtx (op0);
7207 set_mem_attributes (op0, exp, 0);
7208 if (GET_CODE (XEXP (op0, 0)) == REG)
7209 mark_reg_pointer (XEXP (op0, 0), alignment);
7211 MEM_VOLATILE_P (op0) |= volatilep;
7212 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7213 || modifier == EXPAND_CONST_ADDRESS
7214 || modifier == EXPAND_INITIALIZER)
7216 else if (target == 0)
7217 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7219 convert_move (target, op0, unsignedp);
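/* Editor's illustration: for `struct { int a : 3; int b : 5; } x;' an
   access to `x.b' is decomposed by get_inner_reference into roughly
   bitpos = 3 and bitsize = 5 (exact layout varies by target), after
   which extract_bit_field emits the shift-and-mask or extv/extzv-style
   sequence.  */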
7225 rtx insn, before = get_last_insn (), vtbl_ref;
7227 /* Evaluate the interior expression. */
7228 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7231 /* Get or create an instruction off which to hang a note. */
7232 if (REG_P (subtarget))
7235 insn = get_last_insn ();
7238 if (! INSN_P (insn))
7239 insn = prev_nonnote_insn (insn);
7243 target = gen_reg_rtx (GET_MODE (subtarget));
7244 insn = emit_move_insn (target, subtarget);
7247 /* Collect the data for the note. */
7248 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7249 vtbl_ref = plus_constant (vtbl_ref,
7250 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7251 /* Discard the initial CONST that was added. */
7252 vtbl_ref = XEXP (vtbl_ref, 0);
7255 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7260 /* Intended for a reference to a buffer of a file-object in Pascal.
7261 But it's not certain that a special tree code will really be
7262 necessary for these. INDIRECT_REF might work for them. */
7268 /* Pascal set IN expression.
7270 Algorithm:
7271 rlo = set_low - (set_low%bits_per_word);
7272 the_word = set [ (index - rlo)/bits_per_word ];
7273 bit_index = index % bits_per_word;
7274 bitmask = 1 << bit_index;
7275 return !!(the_word & bitmask); */
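/* Editor's sketch of that algorithm as stand-alone C (illustrative only;
   a byte-addressed set with 8-bit "words" is assumed, matching the
   BITS_PER_UNIT arithmetic in the expansion below):

       static int
       set_member_p (const unsigned char *set, int set_low, int index)
       {
         int rlo = set_low - (set_low % 8);
         unsigned char the_word = set[(index - rlo) / 8];
         int bit_index = index % 8;
         unsigned char bitmask = (unsigned char) (1 << bit_index);
         return (the_word & bitmask) != 0;
       }
*/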
7277 tree set = TREE_OPERAND (exp, 0);
7278 tree index = TREE_OPERAND (exp, 1);
7279 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7280 tree set_type = TREE_TYPE (set);
7281 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7282 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7283 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7284 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7285 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7286 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7287 rtx setaddr = XEXP (setval, 0);
7288 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7290 rtx diff, quo, rem, addr, bit, result;
7292 /* If domain is empty, answer is no. Likewise if index is constant
7293 and out of bounds. */
7294 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7295 && TREE_CODE (set_low_bound) == INTEGER_CST
7296 && tree_int_cst_lt (set_high_bound, set_low_bound))
7297 || (TREE_CODE (index) == INTEGER_CST
7298 && TREE_CODE (set_low_bound) == INTEGER_CST
7299 && tree_int_cst_lt (index, set_low_bound))
7300 || (TREE_CODE (set_high_bound) == INTEGER_CST
7301 && TREE_CODE (index) == INTEGER_CST
7302 && tree_int_cst_lt (set_high_bound, index))))
7306 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308 /* If we get here, we have to generate the code for both cases
7309 (in range and out of range). */
7311 op0 = gen_label_rtx ();
7312 op1 = gen_label_rtx ();
7314 if (! (GET_CODE (index_val) == CONST_INT
7315 && GET_CODE (lo_r) == CONST_INT))
7317 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7318 GET_MODE (index_val), iunsignedp, 0, op1);
7321 if (! (GET_CODE (index_val) == CONST_INT
7322 && GET_CODE (hi_r) == CONST_INT))
7324 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7325 GET_MODE (index_val), iunsignedp, 0, op1);
7328 /* Calculate the element number of bit zero in the first word
7329 of the set. */
7330 if (GET_CODE (lo_r) == CONST_INT)
7331 rlow = GEN_INT (INTVAL (lo_r)
7332 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7334 rlow = expand_binop (index_mode, and_optab, lo_r,
7335 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7336 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7338 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7339 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7341 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7342 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7343 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7344 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7346 addr = memory_address (byte_mode,
7347 expand_binop (index_mode, add_optab, diff,
7348 setaddr, NULL_RTX, iunsignedp,
7351 /* Extract the bit we want to examine. */
7352 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7353 gen_rtx_MEM (byte_mode, addr),
7354 make_tree (TREE_TYPE (index), rem),
7356 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7357 GET_MODE (target) == byte_mode ? target : 0,
7358 1, OPTAB_LIB_WIDEN);
7360 if (result != target)
7361 convert_move (target, result, 1);
7363 /* Output the code to handle the out-of-range case. */
7366 emit_move_insn (target, const0_rtx);
7371 case WITH_CLEANUP_EXPR:
7372 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7374 WITH_CLEANUP_EXPR_RTL (exp)
7375 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7376 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7378 /* That's it for this cleanup. */
7379 TREE_OPERAND (exp, 1) = 0;
7381 return WITH_CLEANUP_EXPR_RTL (exp);
7383 case CLEANUP_POINT_EXPR:
7385 /* Start a new binding layer that will keep track of all cleanup
7386 actions to be performed. */
7387 expand_start_bindings (2);
7389 target_temp_slot_level = temp_slot_level;
7391 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7392 /* If we're going to use this value, load it up now. */
7394 op0 = force_not_mem (op0);
7395 preserve_temp_slots (op0);
7396 expand_end_bindings (NULL_TREE, 0, 0);
7401 /* Check for a built-in function. */
7402 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7403 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7405 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7407 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7408 == BUILT_IN_FRONTEND)
7409 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7411 return expand_builtin (exp, target, subtarget, tmode, ignore);
7414 return expand_call (exp, target, ignore);
7416 case NON_LVALUE_EXPR:
7419 case REFERENCE_EXPR:
7420 if (TREE_OPERAND (exp, 0) == error_mark_node)
7423 if (TREE_CODE (type) == UNION_TYPE)
7425 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7427 /* If both input and output are BLKmode, this conversion
7428 isn't actually doing anything unless we need to make the
7429 alignment stricter. */
7430 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7431 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7432 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7433 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7437 target = assign_temp (type, 0, 1, 1);
7439 if (GET_CODE (target) == MEM)
7440 /* Store data into beginning of memory target. */
7441 store_expr (TREE_OPERAND (exp, 0),
7442 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7444 else if (GET_CODE (target) == REG)
7445 /* Store this field into a union of the proper type. */
7446 store_field (target,
7447 MIN ((int_size_in_bytes (TREE_TYPE
7448 (TREE_OPERAND (exp, 0)))
7450 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7451 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7452 VOIDmode, 0, BITS_PER_UNIT,
7453 int_size_in_bytes (type), 0);
7457 /* Return the entire union. */
7461 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7466 /* If the signedness of the conversion differs and OP0 is
7467 a promoted SUBREG, clear that indication since we now
7468 have to do the proper extension. */
7469 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7470 && GET_CODE (op0) == SUBREG)
7471 SUBREG_PROMOTED_VAR_P (op0) = 0;
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7477 if (GET_MODE (op0) == mode)
7480 /* If OP0 is a constant, just convert it into the proper mode. */
7481 if (CONSTANT_P (op0))
7483 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7484 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7486 if (modifier == EXPAND_INITIALIZER)
7487 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7491 convert_to_mode (mode, op0,
7492 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7494 convert_move (target, op0,
7495 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7499 /* We come here from MINUS_EXPR when the second operand is a
7500 constant. */
7501 plus_expr:
7502 this_optab = ! unsignedp && flag_trapv
7503 && (GET_MODE_CLASS(mode) == MODE_INT)
7504 ? addv_optab : add_optab;
7506 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7507 something else, make sure we add the register to the constant and
7508 then to the other thing. This case can occur during strength
7509 reduction and doing it this way will produce better code if the
7510 frame pointer or argument pointer is eliminated.
7512 fold-const.c will ensure that the constant is always in the inner
7513 PLUS_EXPR, so the only case we need to do anything about is if
7514 sp, ap, or fp is our second argument, in which case we must swap
7515 the innermost first argument and our second argument. */
7517 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7518 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7519 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7520 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7521 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7522 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7524 tree t = TREE_OPERAND (exp, 1);
7526 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7527 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7530 /* If the result is to be ptr_mode and we are adding an integer to
7531 something, we might be forming a constant. So try to use
7532 plus_constant. If it produces a sum and we can't accept it,
7533 use force_operand. This allows P = &ARR[const] to generate
7534 efficient code on machines where a SYMBOL_REF is not a valid
7535 address.
7537 If this is an EXPAND_SUM call, always return the sum. */
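/* Editor's illustration: for `p = &arr[3]' with 4-byte elements,
   plus_constant can fold the whole address into

       (const (plus (symbol_ref "arr") (const_int 12)))

   so no run-time addition is emitted; only if the sum is not acceptable
   does force_operand materialize it.  */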
7538 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7539 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7541 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7542 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7543 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7547 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7549 /* Use immed_double_const to ensure that the constant is
7550 truncated according to the mode of OP1, then sign extended
7551 to a HOST_WIDE_INT. Using the constant directly can result
7552 in non-canonical RTL in a 64x32 cross compile. */
7554 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7556 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7557 op1 = plus_constant (op1, INTVAL (constant_part));
7558 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7559 op1 = force_operand (op1, target);
7563 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7564 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7565 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7571 if (! CONSTANT_P (op0))
7573 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7574 VOIDmode, modifier);
7575 /* Don't go to both_summands if modifier
7576 says it's not right to return a PLUS. */
7577 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7581 /* Use immed_double_const to ensure that the constant is
7582 truncated according to the mode of OP1, then sign extended
7583 to a HOST_WIDE_INT. Using the constant directly can result
7584 in non-canonical RTL in a 64x32 cross compile. */
7586 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7588 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7589 op0 = plus_constant (op0, INTVAL (constant_part));
7590 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7591 op0 = force_operand (op0, target);
7596 /* No sense saving up arithmetic to be done
7597 if it's all in the wrong mode to form part of an address.
7598 And force_operand won't know whether to sign-extend or
7599 zero-extend. */
7600 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7601 || mode != ptr_mode)
7604 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7607 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7608 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7611 /* Make sure any term that's a sum with a constant comes last. */
7612 if (GET_CODE (op0) == PLUS
7613 && CONSTANT_P (XEXP (op0, 1)))
7619 /* If adding to a sum including a constant,
7620 associate it to put the constant outside. */
7621 if (GET_CODE (op1) == PLUS
7622 && CONSTANT_P (XEXP (op1, 1)))
7624 rtx constant_term = const0_rtx;
7626 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7629 /* Ensure that MULT comes first if there is one. */
7630 else if (GET_CODE (op0) == MULT)
7631 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7633 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7635 /* Let's also eliminate constants from op0 if possible. */
7636 op0 = eliminate_constant_term (op0, &constant_term);
7638 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7639 their sum should be a constant. Form it into OP1, since the
7640 result we want will then be OP0 + OP1. */
7642 temp = simplify_binary_operation (PLUS, mode, constant_term,
7647 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7650 /* Put a constant term last and put a multiplication first. */
7651 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7652 temp = op1, op1 = op0, op0 = temp;
7654 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7655 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
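/* Editor's illustration of the reassociation above: expanding
   (A + 4) + (B + 3) in EXPAND_SUM context pulls the constants out of
   both summands and folds them, yielding (A + B) + 7, a form that
   drops straight into a base-plus-displacement address.  */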
7658 /* For initializers, we are allowed to return a MINUS of two
7659 symbolic constants. Here we handle all cases when both operands
7661 /* Handle difference of two symbolic constants,
7662 for the sake of an initializer. */
7663 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7664 && really_constant_p (TREE_OPERAND (exp, 0))
7665 && really_constant_p (TREE_OPERAND (exp, 1)))
7667 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7668 VOIDmode, ro_modifier);
7669 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7670 VOIDmode, ro_modifier);
7672 /* If the last operand is a CONST_INT, use plus_constant of
7673 the negated constant. Else make the MINUS. */
7674 if (GET_CODE (op1) == CONST_INT)
7675 return plus_constant (op0, - INTVAL (op1));
7677 return gen_rtx_MINUS (mode, op0, op1);
7679 /* Convert A - const to A + (-const). */
7680 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7682 tree negated = fold (build1 (NEGATE_EXPR, type,
7683 TREE_OPERAND (exp, 1)));
7685 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7686 /* If we can't negate the constant in TYPE, leave it alone and
7687 expand_binop will negate it for us. We used to try to do it
7688 here in the signed version of TYPE, but that doesn't work
7689 on POINTER_TYPEs. */;
7692 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7696 this_optab = ! unsignedp && flag_trapv
7697 && (GET_MODE_CLASS(mode) == MODE_INT)
7698 ? subv_optab : sub_optab;
7702 /* If first operand is constant, swap them.
7703 Thus the following special case checks need only
7704 check the second operand. */
7705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7707 tree t1 = TREE_OPERAND (exp, 0);
7708 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7709 TREE_OPERAND (exp, 1) = t1;
7712 /* Attempt to return something suitable for generating an
7713 indexed address, for machines that support that. */
7715 if (modifier == EXPAND_SUM && mode == ptr_mode
7716 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7717 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7722 /* Apply distributive law if OP0 is x+c. */
7723 if (GET_CODE (op0) == PLUS
7724 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7729 (mode, XEXP (op0, 0),
7730 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7731 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7732 * INTVAL (XEXP (op0, 1))));
7734 if (GET_CODE (op0) != REG)
7735 op0 = force_operand (op0, NULL_RTX);
7736 if (GET_CODE (op0) != REG)
7737 op0 = copy_to_mode_reg (mode, op0);
7740 gen_rtx_MULT (mode, op0,
7741 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
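/* Editor's illustration of the distributive law above: (X + 4) * 8
   becomes (X * 8) + 32, so the multiplication applies only to the
   variable part and the constant lands in the displacement.  */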
7744 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7747 /* Check for multiplying things that have been extended
7748 from a narrower type. If this machine supports multiplying
7749 in that narrower type with a result in the desired type,
7750 do it that way, and avoid the explicit type-conversion. */
7751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7752 && TREE_CODE (type) == INTEGER_TYPE
7753 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7754 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7755 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7756 && int_fits_type_p (TREE_OPERAND (exp, 1),
7757 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7758 /* Don't use a widening multiply if a shift will do. */
7759 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7760 > HOST_BITS_PER_WIDE_INT)
7761 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7763 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7764 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7766 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7767 /* If both operands are extended, they must either both
7768 be zero-extended or both be sign-extended. */
7769 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7771 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7773 enum machine_mode innermode
7774 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7775 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7776 ? smul_widen_optab : umul_widen_optab);
7777 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7778 ? umul_widen_optab : smul_widen_optab);
7779 if (mode == GET_MODE_WIDER_MODE (innermode))
7781 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7783 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7784 NULL_RTX, VOIDmode, 0);
7785 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7786 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7789 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7790 NULL_RTX, VOIDmode, 0);
7793 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7794 && innermode == word_mode)
7797 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7798 NULL_RTX, VOIDmode, 0);
7799 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7800 op1 = convert_modes (innermode, mode,
7801 expand_expr (TREE_OPERAND (exp, 1),
7802 NULL_RTX, VOIDmode, 0),
7805 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 temp = expand_binop (mode, other_optab, op0, op1, target,
7808 unsignedp, OPTAB_LIB_WIDEN);
7809 htem = expand_mult_highpart_adjust (innermode,
7810 gen_highpart (innermode, temp),
7812 gen_highpart (innermode, temp),
7814 emit_move_insn (gen_highpart (innermode, temp), htem);
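/* Editor's illustration: on a 32-bit target, `(long long) a * (long long) b'
   with int A and B matches the widening path above and is emitted as a
   single SImode x SImode -> DImode multiply (a mulsidi3/umulsidi3-style
   pattern), or as a highpart-adjusted word_mode multiply, instead of
   extending both operands and doing a full DImode multiplication.  */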
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7820 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7821 return expand_mult (mode, op0, op1, target, unsignedp);
7823 case TRUNC_DIV_EXPR:
7824 case FLOOR_DIV_EXPR:
7826 case ROUND_DIV_EXPR:
7827 case EXACT_DIV_EXPR:
7828 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7830 /* Possible optimization: compute the dividend with EXPAND_SUM;
7831 then, if the divisor is constant, we can optimize the case
7832 where some terms of the dividend have coefficients divisible by it. */
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7834 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7835 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7838 /* Emit a/b as a*(1/b). Later, CSE may manage to eliminate the
7839 reciprocal, saving an expensive divide. If not, combine will
7840 rebuild the original computation. */
7841 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7842 && !real_onep (TREE_OPERAND (exp, 0)))
7843 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7844 build (RDIV_EXPR, type,
7845 build_real (type, dconst1),
7846 TREE_OPERAND (exp, 1))),
7847 target, tmode, unsignedp);
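/* Editor's illustration: with -funsafe-math-optimizations, `x/d + y/d'
   is rewritten as `x*(1/d) + y*(1/d)'; CSE can then compute 1/d once,
   trading two divides for one divide and two multiplies.  */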
7848 this_optab = sdiv_optab;
7851 case TRUNC_MOD_EXPR:
7852 case FLOOR_MOD_EXPR:
7854 case ROUND_MOD_EXPR:
7855 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7859 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7861 case FIX_ROUND_EXPR:
7862 case FIX_FLOOR_EXPR:
7864 abort (); /* Not used for C. */
7866 case FIX_TRUNC_EXPR:
7867 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7869 target = gen_reg_rtx (mode);
7870 expand_fix (target, op0, unsignedp);
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7876 target = gen_reg_rtx (mode);
7877 /* expand_float can't figure out what to do if FROM has VOIDmode.
7878 So give it the correct mode. With -O, cse will optimize this. */
7879 if (GET_MODE (op0) == VOIDmode)
7880 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7882 expand_float (target, op0,
7883 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7887 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7888 temp = expand_unop (mode,
7889 ! unsignedp && flag_trapv
7890 && (GET_MODE_CLASS(mode) == MODE_INT)
7891 ? negv_optab : neg_optab, op0, target, 0);
7897 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7899 /* Handle complex values specially. */
7900 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7901 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7902 return expand_complex_abs (mode, op0, target, unsignedp);
7904 /* Unsigned abs is simply the operand. Testing here means we don't
7905 risk generating incorrect code below. */
7906 if (TREE_UNSIGNED (type))
7909 return expand_abs (mode, op0, target, unsignedp,
7910 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7914 target = original_target;
7915 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7916 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7917 || GET_MODE (target) != mode
7918 || (GET_CODE (target) == REG
7919 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7920 target = gen_reg_rtx (mode);
7921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7924 /* First try to do it with a special MIN or MAX instruction.
7925 If that does not win, use a conditional jump to select the proper
7926 value. */
7927 this_optab = (TREE_UNSIGNED (type)
7928 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7929 : (code == MIN_EXPR ? smin_optab : smax_optab));
7931 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7936 /* At this point, a MEM target is no longer useful; we will get better
7937 code without it. */
7939 if (GET_CODE (target) == MEM)
7940 target = gen_reg_rtx (mode);
7943 emit_move_insn (target, op0);
7945 op0 = gen_label_rtx ();
7947 /* If this mode is an integer too wide to compare properly,
7948 compare word by word. Rely on cse to optimize constant cases. */
7949 if (GET_MODE_CLASS (mode) == MODE_INT
7950 && ! can_compare_p (GE, mode, ccp_jump))
7952 if (code == MAX_EXPR)
7953 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7954 target, op1, NULL_RTX, op0);
7956 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7957 op1, target, NULL_RTX, op0);
7961 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7962 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7963 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7966 emit_move_insn (target, op1);
7971 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7972 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7979 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7984 /* ??? Can optimize bitwise operations with one arg constant.
7985 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7986 and (a bitwise1 b) bitwise2 b (etc)
7987 but that is probably not worthwhile. */
7989 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7990 boolean values when we want in all cases to compute both of them. In
7991 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7992 as actual zero-or-1 values and then bitwise anding. In cases where
7993 there cannot be any side effects, better code would be made by
7994 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7995 how to recognize those cases. */
7997 case TRUTH_AND_EXPR:
7999 this_optab = and_optab;
8004 this_optab = ior_optab;
8007 case TRUTH_XOR_EXPR:
8009 this_optab = xor_optab;
8016 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8019 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8022 /* Could determine the answer when only additive constants differ. Also,
8023 the addition of one can be handled by changing the condition. */
8030 case UNORDERED_EXPR:
8037 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8041 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8042 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8044 && GET_CODE (original_target) == REG
8045 && (GET_MODE (original_target)
8046 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8048 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8051 if (temp != original_target)
8052 temp = copy_to_reg (temp);
8054 op1 = gen_label_rtx ();
8055 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8056 GET_MODE (temp), unsignedp, 0, op1);
8057 emit_move_insn (temp, const1_rtx);
8062 /* If no set-flag instruction, must generate a conditional
8063 store into a temporary variable. Drop through
8064 and handle this like && and ||. */
8066 case TRUTH_ANDIF_EXPR:
8067 case TRUTH_ORIF_EXPR:
8069 && (target == 0 || ! safe_from_p (target, exp, 1)
8070 /* Make sure we don't have a hard reg (such as function's return
8071 value) live across basic blocks, if not optimizing. */
8072 || (!optimize && GET_CODE (target) == REG
8073 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8074 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8077 emit_clr_insn (target);
8079 op1 = gen_label_rtx ();
8080 jumpifnot (exp, op1);
8083 emit_0_to_1_insn (target);
8086 return ignore ? const0_rtx : target;
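/* Editor's illustration of the expansion above for `t = (a && b)':

       t = 0;
       if (! (a && b)) goto L;    -- jumpifnot emits the short-circuit tests
       t = 1;
     L:

   The same skeleton serves `a || b'; the result is the value of T.  */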
8088 case TRUTH_NOT_EXPR:
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8090 /* The parser is careful to generate TRUTH_NOT_EXPR
8091 only with operands that are always zero or one. */
8092 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8093 target, 1, OPTAB_LIB_WIDEN);
8099 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8101 return expand_expr (TREE_OPERAND (exp, 1),
8102 (ignore ? const0_rtx : target),
8106 /* If we would have a "singleton" (see below) were it not for a
8107 conversion in each arm, bring that conversion back out. */
8108 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8109 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8110 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8111 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8113 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8114 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8116 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8117 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8118 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8119 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8120 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8121 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8122 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8123 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8124 return expand_expr (build1 (NOP_EXPR, type,
8125 build (COND_EXPR, TREE_TYPE (iftrue),
8126 TREE_OPERAND (exp, 0),
8128 target, tmode, modifier);
8132 /* Note that COND_EXPRs whose type is a structure or union
8133 are required to be constructed to contain assignments of
8134 a temporary variable, so that we can evaluate them here
8135 for side effect only. If type is void, we must do likewise. */
8137 /* If an arm of the branch requires a cleanup,
8138 only that cleanup is performed. */
8141 tree binary_op = 0, unary_op = 0;
8143 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8144 convert it to our mode, if necessary. */
8145 if (integer_onep (TREE_OPERAND (exp, 1))
8146 && integer_zerop (TREE_OPERAND (exp, 2))
8147 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8151 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8156 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8157 if (GET_MODE (op0) == mode)
8161 target = gen_reg_rtx (mode);
8162 convert_move (target, op0, unsignedp);
8166 /* Check for X ? A + B : A. If we have this, we can copy A to the
8167 output and conditionally add B. Similarly for unary operations.
8168 Don't do this if X has side-effects because those side effects
8169 might affect A or B and the "?" operation is a sequence point in
8170 ANSI. (operand_equal_p tests for side effects.) */
8172 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8173 && operand_equal_p (TREE_OPERAND (exp, 2),
8174 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8175 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8176 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8177 && operand_equal_p (TREE_OPERAND (exp, 1),
8178 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8179 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8180 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8181 && operand_equal_p (TREE_OPERAND (exp, 2),
8182 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8183 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8184 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8185 && operand_equal_p (TREE_OPERAND (exp, 1),
8186 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8187 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
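/* Illustrative sketch (hypothetical operands): the four tests above
   recognize shapes such as

     x ? a + b : a      ->  singleton A, binary_op A + B
     x ? a : -a         ->  singleton A, unary_op -A

   so that A can be copied to the output unconditionally and only the
   operation need be applied under the condition.  */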
8189 /* If we are not to produce a result, we have no target. Otherwise,
8190 if a target was specified use it; it will not be used as an
8191 intermediate target unless it is safe. If no target, use a temporary. */
8196 else if (original_target
8197 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8198 || (singleton && GET_CODE (original_target) == REG
8199 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8200 && original_target == var_rtx (singleton)))
8201 && GET_MODE (original_target) == mode
8202 #ifdef HAVE_conditional_move
8203 && (! can_conditionally_move_p (mode)
8204 || GET_CODE (original_target) == REG
8205 || TREE_ADDRESSABLE (type))
8207 && (GET_CODE (original_target) != MEM
8208 || TREE_ADDRESSABLE (type)))
8209 temp = original_target;
8210 else if (TREE_ADDRESSABLE (type))
8213 temp = assign_temp (type, 0, 0, 1);
8215 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8216 do the test of X as a store-flag operation, do this as
8217 A + ((X != 0) << log C). Similarly for other simple binary
8218 operators. Only do for C == 1 if BRANCH_COST is low. */
8219 if (temp && singleton && binary_op
8220 && (TREE_CODE (binary_op) == PLUS_EXPR
8221 || TREE_CODE (binary_op) == MINUS_EXPR
8222 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8223 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8224 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8225 : integer_onep (TREE_OPERAND (binary_op, 1)))
8226 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8229 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8230 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8231 ? addv_optab : add_optab)
8232 : TREE_CODE (binary_op) == MINUS_EXPR
8233 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8234 ? subv_optab : sub_optab)
8235 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8238 /* If we had X ? A : A + 1, do this as A + (X == 0).
8240 We have to invert the truth value here and then put it
8241 back later if do_store_flag fails. We cannot simply copy
8242 TREE_OPERAND (exp, 0) to another variable and modify that
8243 because invert_truthvalue can modify the tree pointed to by its argument. */
8245 if (singleton == TREE_OPERAND (exp, 1))
8246 TREE_OPERAND (exp, 0)
8247 = invert_truthvalue (TREE_OPERAND (exp, 0));
8249 result = do_store_flag (TREE_OPERAND (exp, 0),
8250 (safe_from_p (temp, singleton, 1)
8252 mode, BRANCH_COST <= 1);
8254 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8255 result = expand_shift (LSHIFT_EXPR, mode, result,
8256 build_int_2 (tree_log2
8260 (safe_from_p (temp, singleton, 1)
8261 ? temp : NULL_RTX), 0);
8265 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8266 return expand_binop (mode, boptab, op1, result, temp,
8267 unsignedp, OPTAB_LIB_WIDEN);
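/* Illustrative sketch: with a usable store-flag instruction and
   BRANCH_COST >= 3, a hypothetical

     int f (int x, int a) { return x ? a + 4 : a; }

   is expanded branch-free as

     a + ((x != 0) << 2)

   where 2 is tree_log2 of the constant 4, via the do_store_flag,
   expand_shift and expand_binop calls above.  */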
8269 else if (singleton == TREE_OPERAND (exp, 1))
8270 TREE_OPERAND (exp, 0)
8271 = invert_truthvalue (TREE_OPERAND (exp, 0));
8274 do_pending_stack_adjust ();
8276 op0 = gen_label_rtx ();
8278 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8282 /* If the target conflicts with the other operand of the
8283 binary op, we can't use it. Also, we can't use the target
8284 if it is a hard register, because evaluating the condition
8285 might clobber it. */
8287 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8288 || (GET_CODE (temp) == REG
8289 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8290 temp = gen_reg_rtx (mode);
8291 store_expr (singleton, temp, 0);
8294 expand_expr (singleton,
8295 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8296 if (singleton == TREE_OPERAND (exp, 1))
8297 jumpif (TREE_OPERAND (exp, 0), op0);
8299 jumpifnot (TREE_OPERAND (exp, 0), op0);
8301 start_cleanup_deferral ();
8302 if (binary_op && temp == 0)
8303 /* Just touch the other operand. */
8304 expand_expr (TREE_OPERAND (binary_op, 1),
8305 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8307 store_expr (build (TREE_CODE (binary_op), type,
8308 make_tree (type, temp),
8309 TREE_OPERAND (binary_op, 1)),
8312 store_expr (build1 (TREE_CODE (unary_op), type,
8313 make_tree (type, temp)),
8317 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8318 comparison operator. If we have one of these cases, set the
8319 output to A, branch on A (cse will merge these two references),
8320 then set the output to FOO. */
8322 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8323 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8324 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8325 TREE_OPERAND (exp, 1), 0)
8326 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8327 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8328 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8330 if (GET_CODE (temp) == REG
8331 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8332 temp = gen_reg_rtx (mode);
8333 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8334 jumpif (TREE_OPERAND (exp, 0), op0);
8336 start_cleanup_deferral ();
8337 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8341 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8342 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8343 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8344 TREE_OPERAND (exp, 2), 0)
8345 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8346 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8347 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8349 if (GET_CODE (temp) == REG
8350 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8351 temp = gen_reg_rtx (mode);
8352 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8353 jumpifnot (TREE_OPERAND (exp, 0), op0);
8355 start_cleanup_deferral ();
8356 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8361 op1 = gen_label_rtx ();
8362 jumpifnot (TREE_OPERAND (exp, 0), op0);
8364 start_cleanup_deferral ();
8366 /* One branch of the cond can be void, if it never returns. For
8367 example A ? throw : E */
8369 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8370 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8372 expand_expr (TREE_OPERAND (exp, 1),
8373 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8374 end_cleanup_deferral ();
8376 emit_jump_insn (gen_jump (op1));
8379 start_cleanup_deferral ();
8381 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8382 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8384 expand_expr (TREE_OPERAND (exp, 2),
8385 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8388 end_cleanup_deferral ();
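/* Illustrative sketch of the general expansion just emitted (OP0 and
   OP1 are the labels generated above):

       if (!cond) goto op0;     (jumpifnot)
       temp = then_value;       (store_expr, first arm)
       goto op1;
     op0:
       temp = else_value;       (store_expr, second arm)
     op1:
       ...

   with cleanup deferral bracketing each arm.  */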
8399 /* Something needs to be initialized, but we didn't know
8400 where that thing was when building the tree. For example,
8401 it could be the return value of a function, or a parameter
8402 to a function which is laid out on the stack, or a temporary
8403 variable which must be passed by reference.
8405 We guarantee that the expression will either be constructed
8406 or copied into our original target. */
8408 tree slot = TREE_OPERAND (exp, 0);
8409 tree cleanups = NULL_TREE;
8412 if (TREE_CODE (slot) != VAR_DECL)
8416 target = original_target;
8418 /* Set this here so that if we get a target that refers to a
8419 register variable that's already been used, put_reg_into_stack
8420 knows that it should fix up those uses. */
8421 TREE_USED (slot) = 1;
8425 if (DECL_RTL_SET_P (slot))
8427 target = DECL_RTL (slot);
8428 /* We have already expanded the slot, so don't do it again. */
8430 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8435 target = assign_temp (type, 2, 0, 1);
8436 /* All temp slots at this level must not conflict. */
8437 preserve_temp_slots (target);
8438 SET_DECL_RTL (slot, target);
8439 if (TREE_ADDRESSABLE (slot))
8440 put_var_into_stack (slot);
8442 /* Since SLOT is not known to the called function
8443 to belong to its stack frame, we must build an explicit
8444 cleanup. This case occurs when we must build up a reference
8445 to pass the reference as an argument. In this case,
8446 it is very likely that such a reference need not be built here. */
8449 if (TREE_OPERAND (exp, 2) == 0)
8450 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8451 cleanups = TREE_OPERAND (exp, 2);
8456 /* This case does occur, when expanding a parameter which
8457 needs to be constructed on the stack. The target
8458 is the actual stack address that we want to initialize.
8459 The function we call will perform the cleanup in this case. */
8461 /* If we have already assigned it space, use that space,
8462 not the target that we were passed in, as our target
8463 parameter is only a hint. */
8464 if (DECL_RTL_SET_P (slot))
8466 target = DECL_RTL (slot);
8467 /* We have already expanded the slot, so don't do it again. */
8469 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8474 SET_DECL_RTL (slot, target);
8475 /* If we must have an addressable slot, then make sure that
8476 the RTL that we just stored in slot is OK. */
8477 if (TREE_ADDRESSABLE (slot))
8478 put_var_into_stack (slot);
8482 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8483 /* Mark it as expanded. */
8484 TREE_OPERAND (exp, 1) = NULL_TREE;
8486 store_expr (exp1, target, 0);
8488 expand_decl_cleanup (NULL_TREE, cleanups);
8495 tree lhs = TREE_OPERAND (exp, 0);
8496 tree rhs = TREE_OPERAND (exp, 1);
8498 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8504 /* If lhs is complex, expand calls in rhs before computing it.
8505 That's so we don't compute a pointer and save it over a
8506 call. If lhs is simple, compute it first so we can give it
8507 as a target if the rhs is just a call. This avoids an
8508 extra temp and copy and that prevents a partial-subsumption
8509 which makes bad code. Actually we could treat
8510 component_ref's of vars like vars. */
8512 tree lhs = TREE_OPERAND (exp, 0);
8513 tree rhs = TREE_OPERAND (exp, 1);
8517 /* Check for |= or &= of a bitfield of size one into another bitfield
8518 of size 1. In this case, (unless we need the result of the
8519 assignment) we can do this more efficiently with a
8520 test followed by an assignment, if necessary.
8522 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8523 things change so we do, this code should be enhanced to handle it. */
8526 && TREE_CODE (lhs) == COMPONENT_REF
8527 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8528 || TREE_CODE (rhs) == BIT_AND_EXPR)
8529 && TREE_OPERAND (rhs, 0) == lhs
8530 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8531 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8532 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8534 rtx label = gen_label_rtx ();
8536 do_jump (TREE_OPERAND (rhs, 1),
8537 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8538 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8539 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8540 (TREE_CODE (rhs) == BIT_IOR_EXPR
8542 : integer_zero_node)),
8544 do_pending_stack_adjust ();
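/* Illustrative sketch (hypothetical struct): when the result of the
   assignment is unused,

     struct s { unsigned a : 1, b : 1; } v;
     v.a |= v.b;

   becomes a test and a conditional store of a constant,

     if (v.b) v.a = 1;

   and `v.a &= v.b' likewise stores 0 when V.B is clear.  */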
8549 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8555 if (!TREE_OPERAND (exp, 0))
8556 expand_null_return ();
8558 expand_return (TREE_OPERAND (exp, 0));
8561 case PREINCREMENT_EXPR:
8562 case PREDECREMENT_EXPR:
8563 return expand_increment (exp, 0, ignore);
8565 case POSTINCREMENT_EXPR:
8566 case POSTDECREMENT_EXPR:
8567 /* Faster to treat as pre-increment if result is not used. */
8568 return expand_increment (exp, ! ignore, ignore);
8571 /* If nonzero, TEMP will be set to the address of something that might
8572 be a MEM corresponding to a stack slot. */
8575 /* Are we taking the address of a nested function? */
8576 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8577 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8578 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8579 && ! TREE_STATIC (exp))
8581 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8582 op0 = force_operand (op0, target);
8584 /* If we are taking the address of something erroneous, just use zero. */
8586 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8590 /* We make sure to pass const0_rtx down if we came in with
8591 ignore set, to avoid doing the cleanups twice for something. */
8592 op0 = expand_expr (TREE_OPERAND (exp, 0),
8593 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8594 (modifier == EXPAND_INITIALIZER
8595 ? modifier : EXPAND_CONST_ADDRESS));
8597 /* If we are going to ignore the result, OP0 will have been set
8598 to const0_rtx, so just return it. Don't get confused and
8599 think we are taking the address of the constant. */
8603 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8604 clever and return a REG when given a MEM. */
8605 op0 = protect_from_queue (op0, 1);
8607 /* We would like the object in memory. If it is a constant, we can
8608 have it be statically allocated into memory. For a non-constant,
8609 we need to allocate some memory and store the value into it. */
8611 if (CONSTANT_P (op0))
8612 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8614 else if (GET_CODE (op0) == MEM)
8616 mark_temp_addr_taken (op0);
8617 temp = XEXP (op0, 0);
8620 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8621 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8622 || GET_CODE (op0) == PARALLEL)
8624 /* If this object is in a register, it must be copied into memory before we can take its address. */
8626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8627 tree nt = build_qualified_type (inner_type,
8628 (TYPE_QUALS (inner_type)
8629 | TYPE_QUAL_CONST));
8630 rtx memloc = assign_temp (nt, 1, 1, 1);
8632 mark_temp_addr_taken (memloc);
8633 if (GET_CODE (op0) == PARALLEL)
8634 /* Handle calls that pass values in multiple non-contiguous
8635 locations. The Irix 6 ABI has examples of this. */
8636 emit_group_store (memloc, op0,
8637 int_size_in_bytes (inner_type),
8638 TYPE_ALIGN (inner_type));
8640 emit_move_insn (memloc, op0);
8644 if (GET_CODE (op0) != MEM)
8647 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8649 temp = XEXP (op0, 0);
8650 #ifdef POINTERS_EXTEND_UNSIGNED
8651 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8652 && mode == ptr_mode)
8653 temp = convert_memory_address (ptr_mode, temp);
8658 op0 = force_operand (XEXP (op0, 0), target);
8661 if (flag_force_addr && GET_CODE (op0) != REG)
8662 op0 = force_reg (Pmode, op0);
8664 if (GET_CODE (op0) == REG
8665 && ! REG_USERVAR_P (op0))
8666 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8668 /* If we might have had a temp slot, add an equivalent address for it. */
8671 update_temp_slot_address (temp, op0);
8673 #ifdef POINTERS_EXTEND_UNSIGNED
8674 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8675 && mode == ptr_mode)
8676 op0 = convert_memory_address (ptr_mode, op0);
8681 case ENTRY_VALUE_EXPR:
8684 /* COMPLEX type for Extended Pascal & Fortran */
8687 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8690 /* Get the rtx code of the operands. */
8691 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8692 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8695 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8699 /* Move the real (op0) and imaginary (op1) parts to their location. */
8700 emit_move_insn (gen_realpart (mode, target), op0);
8701 emit_move_insn (gen_imagpart (mode, target), op1);
8703 insns = get_insns ();
8706 /* Complex construction should appear as a single unit. */
8707 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8708 each with a separate pseudo as destination.
8709 It's not correct for flow to treat them as a unit. */
8710 if (GET_CODE (target) != CONCAT)
8711 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
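/* Illustrative sketch: a GNU C construction of a complex value from
   two scalars, e.g. the hypothetical

     __complex__ double z = re + im * 1.0i;

   arrives here as a COMPLEX_EXPR; the two moves above fill the real
   and imaginary halves of TARGET separately, and emit_no_conflict_block
   marks the pair as one logical assignment for flow analysis.  */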
8719 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8720 return gen_realpart (mode, op0);
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8724 return gen_imagpart (mode, op0);
8728 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8732 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8735 target = gen_reg_rtx (mode);
8739 /* Store the realpart and the negated imagpart to target. */
8740 emit_move_insn (gen_realpart (partmode, target),
8741 gen_realpart (partmode, op0));
8743 imag_t = gen_imagpart (partmode, target);
8744 temp = expand_unop (partmode,
8745 ! unsignedp && flag_trapv
8746 && (GET_MODE_CLASS(partmode) == MODE_INT)
8747 ? negv_optab : neg_optab,
8748 gen_imagpart (partmode, op0), imag_t, 0);
8750 emit_move_insn (imag_t, temp);
8752 insns = get_insns ();
8755 /* Conjugate should appear as a single unit
8756 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8757 each with a separate pseudo as destination.
8758 It's not correct for flow to treat them as a unit. */
8759 if (GET_CODE (target) != CONCAT)
8760 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8767 case TRY_CATCH_EXPR:
8769 tree handler = TREE_OPERAND (exp, 1);
8771 expand_eh_region_start ();
8773 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8775 expand_eh_region_end_cleanup (handler);
8780 case TRY_FINALLY_EXPR:
8782 tree try_block = TREE_OPERAND (exp, 0);
8783 tree finally_block = TREE_OPERAND (exp, 1);
8784 rtx finally_label = gen_label_rtx ();
8785 rtx done_label = gen_label_rtx ();
8786 rtx return_link = gen_reg_rtx (Pmode);
8787 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8788 (tree) finally_label, (tree) return_link);
8789 TREE_SIDE_EFFECTS (cleanup) = 1;
8791 /* Start a new binding layer that will keep track of all cleanup
8792 actions to be performed. */
8793 expand_start_bindings (2);
8795 target_temp_slot_level = temp_slot_level;
8797 expand_decl_cleanup (NULL_TREE, cleanup);
8798 op0 = expand_expr (try_block, target, tmode, modifier);
8800 preserve_temp_slots (op0);
8801 expand_end_bindings (NULL_TREE, 0, 0);
8802 emit_jump (done_label);
8803 emit_label (finally_label);
8804 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8805 emit_indirect_jump (return_link);
8806 emit_label (done_label);
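/* Illustrative sketch of the control flow just laid down (labels are
   the rtx names above):

       <try block>
       ...each exit sets RETURN_LINK to its resume address and
          jumps to FINALLY_LABEL via the registered cleanup...
       goto done_label;
     finally_label:
       <finally block>
       goto *return_link;        (emit_indirect_jump)
     done_label:

   so one copy of the finally block serves every exit path.  */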
8810 case GOTO_SUBROUTINE_EXPR:
8812 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8813 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8814 rtx return_address = gen_label_rtx ();
8815 emit_move_insn (return_link,
8816 gen_rtx_LABEL_REF (Pmode, return_address));
8818 emit_label (return_address);
8823 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8826 return get_exception_pointer (cfun);
8829 /* Function descriptors are not valid except as
8830 initialization constants, and should not be expanded. */
8834 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8837 /* Here to do an ordinary binary operator, generating an instruction
8838 from the optab already placed in `this_optab'. */
8840 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8842 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8843 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8845 temp = expand_binop (mode, this_optab, op0, op1, target,
8846 unsignedp, OPTAB_LIB_WIDEN);
8852 /* Similar to expand_expr, except that we don't specify a target, target
8853 mode, or modifier and we return the alignment of the inner type. This is
8854 used in cases where it is not necessary to align the result to the
8855 alignment of its type as long as we know the alignment of the result, for
8856 example for comparisons of BLKmode values. */
8859 expand_expr_unaligned (exp, palign)
8861 unsigned int *palign;
8864 tree type = TREE_TYPE (exp);
8865 enum machine_mode mode = TYPE_MODE (type);
8867 /* Default the alignment we return to that of the type. */
8868 *palign = TYPE_ALIGN (type);
8870 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8872 if (mode != BLKmode)
8873 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
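/* Illustrative sketch: BLKmode operands arise, for instance, from a
   hypothetical aggregate comparison over

     struct s { char c[6]; } x, y;

   where the caller needs the real alignment of the reference, not
   TYPE_ALIGN, to choose a safe block-compare strategy.  */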
8875 switch (TREE_CODE (exp))
8879 case NON_LVALUE_EXPR:
8880 /* Conversions between BLKmode values don't change the underlying
8881 alignment or value. */
8882 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8883 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8887 /* Much of the code for this case is copied directly from expand_expr.
8888 We need to duplicate it here because we will do something different
8889 in the fall-through case, so we need to handle the same exceptions it does. */
8892 tree array = TREE_OPERAND (exp, 0);
8893 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8894 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8895 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8898 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8901 /* Optimize the special-case of a zero lower bound.
8903 We convert the low_bound to sizetype to avoid some problems
8904 with constant folding. (E.g. suppose the lower bound is 1,
8905 and its mode is QI. Without the conversion, (ARRAY
8906 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8907 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8909 if (! integer_zerop (low_bound))
8910 index = size_diffop (index, convert (sizetype, low_bound));
8912 /* If this is a constant index into a constant array,
8913 just get the value from the array. Handle both the cases when
8914 we have an explicit constructor and when our operand is a variable
8915 that was declared const. */
8917 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8918 && host_integerp (index, 0)
8919 && 0 > compare_tree_int (index,
8920 list_length (CONSTRUCTOR_ELTS
8921 (TREE_OPERAND (exp, 0)))))
8925 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8926 i = tree_low_cst (index, 0);
8927 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8931 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8934 else if (optimize >= 1
8935 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8936 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8937 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8939 if (TREE_CODE (index) == INTEGER_CST)
8941 tree init = DECL_INITIAL (array);
8943 if (TREE_CODE (init) == CONSTRUCTOR)
8947 for (elem = CONSTRUCTOR_ELTS (init);
8948 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8949 elem = TREE_CHAIN (elem))
8953 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8963 case ARRAY_RANGE_REF:
8964 /* If the operand is a CONSTRUCTOR, we can just extract the
8965 appropriate field if it is present. Don't do this if we have
8966 already written the data since we want to refer to that copy
8967 and varasm.c assumes that's what we'll do. */
8968 if (TREE_CODE (exp) == COMPONENT_REF
8969 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8970 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8974 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8975 elt = TREE_CHAIN (elt))
8976 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8977 /* Note that unlike the case in expand_expr, we know this is
8978 BLKmode and hence not an integer. */
8979 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8983 enum machine_mode mode1;
8984 HOST_WIDE_INT bitsize, bitpos;
8987 unsigned int alignment;
8989 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8990 &mode1, &unsignedp, &volatilep,
8993 /* If we got back the original object, something is wrong. Perhaps
8994 we are evaluating an expression too early. In any event, don't
8995 infinitely recurse. */
8999 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9001 /* If this is a constant, put it into a register if it is a
9002 legitimate constant and OFFSET is 0; otherwise put it into memory. */
9003 if (CONSTANT_P (op0))
9005 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9007 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9009 op0 = force_reg (inner_mode, op0);
9011 op0 = validize_mem (force_const_mem (inner_mode, op0));
9016 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9018 /* If this object is in a register, put it into memory.
9019 This case can't occur in C, but can in Ada if we have
9020 unchecked conversion of an expression from a scalar type to
9021 an array or record type. */
9022 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9023 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9025 tree nt = build_qualified_type (TREE_TYPE (tem),
9026 (TYPE_QUALS (TREE_TYPE (tem))
9027 | TYPE_QUAL_CONST));
9028 rtx memloc = assign_temp (nt, 1, 1, 1);
9030 mark_temp_addr_taken (memloc);
9031 emit_move_insn (memloc, op0);
9035 if (GET_CODE (op0) != MEM)
9038 if (GET_MODE (offset_rtx) != ptr_mode)
9040 #ifdef POINTERS_EXTEND_UNSIGNED
9041 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9043 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9047 op0 = change_address (op0, VOIDmode,
9048 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9049 force_reg (ptr_mode,
9053 /* Don't forget about volatility even if this is a bitfield. */
9054 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9056 op0 = copy_rtx (op0);
9057 MEM_VOLATILE_P (op0) = 1;
9060 /* Check the access. */
9061 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9066 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9067 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9069 /* Check the access right of the pointer. */
9070 in_check_memory_usage = 1;
9071 if (size > BITS_PER_UNIT)
9072 emit_library_call (chkr_check_addr_libfunc,
9073 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9074 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9075 TYPE_MODE (sizetype),
9076 GEN_INT (MEMORY_USE_RO),
9077 TYPE_MODE (integer_type_node));
9078 in_check_memory_usage = 0;
9081 /* In cases where an aligned union has an unaligned object
9082 as a field, we might be extracting a BLKmode value from
9083 an integer-mode (e.g., SImode) object. Handle this case
9084 by doing the extract into an object as wide as the field
9085 (which we know to be the width of a basic mode), then
9086 storing into memory, and changing the mode to BLKmode.
9087 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9088 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9089 if (mode1 == VOIDmode
9090 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9091 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9092 && (TYPE_ALIGN (type) > alignment
9093 || bitpos % TYPE_ALIGN (type) != 0)))
9095 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9097 if (ext_mode == BLKmode)
9099 /* In this case, BITPOS must start at a byte boundary. */
9100 if (GET_CODE (op0) != MEM
9101 || bitpos % BITS_PER_UNIT != 0)
9104 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9108 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9110 rtx new = assign_temp (nt, 0, 1, 1);
9112 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9113 unsignedp, NULL_RTX, ext_mode,
9114 ext_mode, alignment,
9115 int_size_in_bytes (TREE_TYPE (tem)));
9117 /* If the result is a record type and BITSIZE is narrower than
9118 the mode of OP0, an integral mode, and this is a big endian
9119 machine, we must put the field into the high-order bits. */
9120 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9121 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9122 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9123 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9124 size_int (GET_MODE_BITSIZE
9129 emit_move_insn (new, op0);
9130 op0 = copy_rtx (new);
9131 PUT_MODE (op0, BLKmode);
9135 /* Get a reference to just this component. */
9136 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9138 set_mem_alias_set (op0, get_alias_set (exp));
9140 /* Adjust the alignment in case the bit position is not
9141 a multiple of the alignment of the inner object. */
9142 while (bitpos % alignment != 0)
9145 if (GET_CODE (XEXP (op0, 0)) == REG)
9146 mark_reg_pointer (XEXP (op0, 0), alignment);
9148 MEM_IN_STRUCT_P (op0) = 1;
9149 MEM_VOLATILE_P (op0) |= volatilep;
9151 *palign = alignment;
9160 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9163 /* Return the tree node if ARG corresponds to a string constant, or zero
9164 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9165 in bytes within the string that ARG is accessing. The type of the
9166 offset will be `sizetype'. */
9169 string_constant (arg, ptr_offset)
9175 if (TREE_CODE (arg) == ADDR_EXPR
9176 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9178 *ptr_offset = size_zero_node;
9179 return TREE_OPERAND (arg, 0);
9181 else if (TREE_CODE (arg) == PLUS_EXPR)
9183 tree arg0 = TREE_OPERAND (arg, 0);
9184 tree arg1 = TREE_OPERAND (arg, 1);
9189 if (TREE_CODE (arg0) == ADDR_EXPR
9190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9192 *ptr_offset = convert (sizetype, arg1);
9193 return TREE_OPERAND (arg0, 0);
9195 else if (TREE_CODE (arg1) == ADDR_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9198 *ptr_offset = convert (sizetype, arg0);
9199 return TREE_OPERAND (arg1, 0);
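/* Illustrative sketch: for the hypothetical C expression

     "hello" + 2

   string_constant returns the STRING_CST "hello" and sets *PTR_OFFSET
   to (sizetype) 2, so callers such as the builtin string expanders can
   fold accesses into a known constant string.  */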
9206 /* Expand code for a post- or pre- increment or decrement
9207 and return the RTX for the result.
9208 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9211 expand_increment (exp, post, ignore)
9217 tree incremented = TREE_OPERAND (exp, 0);
9218 optab this_optab = add_optab;
9220 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9221 int op0_is_copy = 0;
9222 int single_insn = 0;
9223 /* 1 means we can't store into OP0 directly,
9224 because it is a subreg narrower than a word,
9225 and we don't dare clobber the rest of the word. */
9228 /* Stabilize any component ref that might need to be
9229 evaluated more than once below. */
9231 || TREE_CODE (incremented) == BIT_FIELD_REF
9232 || (TREE_CODE (incremented) == COMPONENT_REF
9233 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9234 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9235 incremented = stabilize_reference (incremented);
9236 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9237 ones into save exprs so that they don't accidentally get evaluated
9238 more than once by the code below. */
9239 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9240 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9241 incremented = save_expr (incremented);
9243 /* Compute the operands as RTX.
9244 Note whether OP0 is the actual lvalue or a copy of it:
9245 I believe it is a copy iff it is a register or subreg
9246 and insns were generated in computing it. */
9248 temp = get_last_insn ();
9249 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9251 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9252 in place but instead must do sign- or zero-extension during assignment,
9253 so we copy it into a new register and let the code below use it as a copy.
9256 Note that we can safely modify this SUBREG since it is known not to be
9257 shared (it was made by the expand_expr call above). */
9259 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9262 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9266 else if (GET_CODE (op0) == SUBREG
9267 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9269 /* We cannot increment this SUBREG in place. If we are
9270 post-incrementing, get a copy of the old value. Otherwise,
9271 just mark that we cannot increment in place. */
9273 op0 = copy_to_reg (op0);
9278 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9279 && temp != get_last_insn ());
9280 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9281 EXPAND_MEMORY_USE_BAD);
9283 /* Decide whether incrementing or decrementing. */
9284 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9285 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9286 this_optab = sub_optab;
9288 /* Convert decrement by a constant into a negative increment. */
9289 if (this_optab == sub_optab
9290 && GET_CODE (op1) == CONST_INT)
9292 op1 = GEN_INT (-INTVAL (op1));
9293 this_optab = add_optab;
9296 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9297 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9299 /* For a preincrement, see if we can do this with a single instruction. */
9302 icode = (int) this_optab->handlers[(int) mode].insn_code;
9303 if (icode != (int) CODE_FOR_nothing
9304 /* Make sure that OP0 is valid for operands 0 and 1
9305 of the insn we want to queue. */
9306 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9307 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9308 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9312 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9313 then we cannot just increment OP0. We must therefore contrive to
9314 increment the original value. Then, for postincrement, we can return
9315 OP0 since it is a copy of the old value. For preincrement, expand here
9316 unless we can do it with a single insn.
9318 Likewise if storing directly into OP0 would clobber high bits
9319 we need to preserve (bad_subreg). */
9320 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9322 /* This is the easiest way to increment the value wherever it is.
9323 Problems with multiple evaluation of INCREMENTED are prevented
9324 because either (1) it is a component_ref or preincrement,
9325 in which case it was stabilized above, or (2) it is an array_ref
9326 with constant index in an array in a register, which is
9327 safe to reevaluate. */
9328 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9329 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9330 ? MINUS_EXPR : PLUS_EXPR),
9333 TREE_OPERAND (exp, 1));
9335 while (TREE_CODE (incremented) == NOP_EXPR
9336 || TREE_CODE (incremented) == CONVERT_EXPR)
9338 newexp = convert (TREE_TYPE (incremented), newexp);
9339 incremented = TREE_OPERAND (incremented, 0);
9342 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9343 return post ? op0 : temp;
9348 /* We have a true reference to the value in OP0.
9349 If there is an insn to add or subtract in this mode, queue it.
9350 Queueing the increment insn avoids the register shuffling
9351 that often results if we must increment now and first save
9352 the old value for subsequent use. */
9354 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9355 op0 = stabilize (op0);
9358 icode = (int) this_optab->handlers[(int) mode].insn_code;
9359 if (icode != (int) CODE_FOR_nothing
9360 /* Make sure that OP0 is valid for operands 0 and 1
9361 of the insn we want to queue. */
9362 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9363 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9365 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9366 op1 = force_reg (mode, op1);
9368 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9370 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9372 rtx addr = (general_operand (XEXP (op0, 0), mode)
9373 ? force_reg (Pmode, XEXP (op0, 0))
9374 : copy_to_reg (XEXP (op0, 0)));
9377 op0 = replace_equiv_address (op0, addr);
9378 temp = force_reg (GET_MODE (op0), op0);
9379 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9380 op1 = force_reg (mode, op1);
9382 /* The increment queue is LIFO, thus we have to `queue'
9383 the instructions in reverse order. */
9384 enqueue_insn (op0, gen_move_insn (op0, temp));
9385 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9390 /* Preincrement, or we can't increment with one simple insn. */
9392 /* Save a copy of the value before inc or dec, to return it later. */
9393 temp = value = copy_to_reg (op0);
9395 /* Arrange to return the incremented value. */
9396 /* Copy the rtx because expand_binop will protect from the queue,
9397 and the results of that would be invalid for us to return
9398 if our caller does emit_queue before using our result. */
9399 temp = copy_rtx (value = op0);
9401 /* Increment however we can. */
9402 op1 = expand_binop (mode, this_optab, value, op1,
9403 current_function_check_memory_usage ? NULL_RTX : op0,
9404 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9405 /* Make sure the value is stored into OP0. */
9407 emit_move_insn (op0, op1);
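/* Illustrative sketch: for a hypothetical `i++' whose value is used,
   the code above saves the old value (TEMP), computes I + 1 with
   expand_binop, stores it back into OP0, and returns TEMP; for `++i'
   the freshly stored result is returned instead.  */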
9412 /* At the start of a function, record that we have no previously-pushed
9413 arguments waiting to be popped. */
9416 init_pending_stack_adjust ()
9418 pending_stack_adjust = 0;
9421 /* When exiting from the function, if safe, clear out any pending stack adjust
9422 so the adjustment won't get done.
9424 Note, if the current function calls alloca, then it must have a
9425 frame pointer regardless of the value of flag_omit_frame_pointer. */
9428 clear_pending_stack_adjust ()
9430 #ifdef EXIT_IGNORE_STACK
9432 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9433 && EXIT_IGNORE_STACK
9434 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9435 && ! flag_inline_functions)
9437 stack_pointer_delta -= pending_stack_adjust,
9438 pending_stack_adjust = 0;
9443 /* Pop any previously-pushed arguments that have not been popped yet. */
9446 do_pending_stack_adjust ()
9448 if (inhibit_defer_pop == 0)
9450 if (pending_stack_adjust != 0)
9451 adjust_stack (GEN_INT (pending_stack_adjust));
9452 pending_stack_adjust = 0;
9456 /* Expand conditional expressions. */
9458 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9459 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9463 jumpifnot (exp, label)
9467 do_jump (exp, label, NULL_RTX);
9470 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9477 do_jump (exp, NULL_RTX, label);
9480 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9481 the result is zero, or IF_TRUE_LABEL if the result is one.
9482 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9483 meaning fall through in that case.
9485 do_jump always does any pending stack adjust except when it does not
9486 actually perform a jump. An example where there is no jump
9487 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9489 This function is responsible for optimizing cases such as
9490 &&, || and comparison operators in EXP. */
9493 do_jump (exp, if_false_label, if_true_label)
9495 rtx if_false_label, if_true_label;
9497 enum tree_code code = TREE_CODE (exp);
9498 /* Some cases need to create a label to jump to
9499 in order to properly fall through.
9500 These cases set DROP_THROUGH_LABEL nonzero. */
9501 rtx drop_through_label = 0;
9505 enum machine_mode mode;
9507 #ifdef MAX_INTEGER_COMPUTATION_MODE
9508 check_max_integer_computation_mode (exp);
9519 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9525 /* This is not true with #pragma weak */
9527 /* The address of something can never be zero. */
9529 emit_jump (if_true_label);
9534 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9535 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9536 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9537 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9540 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9542 if ((TYPE_PRECISION (TREE_TYPE (exp))
9543 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9545 case NON_LVALUE_EXPR:
9546 case REFERENCE_EXPR:
9551 /* These cannot change zero->non-zero or vice versa. */
9552 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9555 case WITH_RECORD_EXPR:
9556 /* Put the object on the placeholder list, recurse through our first
9557 operand, and pop the list. */
9558 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9560 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9561 placeholder_list = TREE_CHAIN (placeholder_list);
9565 /* This is never less insns than evaluating the PLUS_EXPR followed by
9566 a test and can be longer if the test is eliminated. */
9568 /* Reduce to minus. */
9569 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9570 TREE_OPERAND (exp, 0),
9571 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9572 TREE_OPERAND (exp, 1))));
9573 /* Process as MINUS. */
9577 /* Non-zero iff operands of minus differ. */
9578 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9579 TREE_OPERAND (exp, 0),
9580 TREE_OPERAND (exp, 1)),
9581 NE, NE, if_false_label, if_true_label);
9585 /* If we are AND'ing with a small constant, do this comparison in the
9586 smallest type that fits. If the machine doesn't have comparisons
9587 that small, it will be converted back to the wider comparison.
9588 This helps if we are testing the sign bit of a narrower object.
9589 combine can't do this for us because it can't know whether a
9590 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9592 if (! SLOW_BYTE_ACCESS
9593 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9594 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9595 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9596 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9597 && (type = type_for_mode (mode, 1)) != 0
9598 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9599 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9600 != CODE_FOR_nothing))
9602 do_jump (convert (type, exp), if_false_label, if_true_label);
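/* Illustrative sketch: given a hypothetical 32-bit int X, the test

     if (x & 0x80) ...

   is converted here to a QImode comparison when the target supports
   one, since only the low byte can affect the result; this pays off
   when that byte holds the sign bit of a narrower object.  */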
9607 case TRUTH_NOT_EXPR:
9608 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9611 case TRUTH_ANDIF_EXPR:
9612 if (if_false_label == 0)
9613 if_false_label = drop_through_label = gen_label_rtx ();
9614 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9615 start_cleanup_deferral ();
9616 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9617 end_cleanup_deferral ();
9620 case TRUTH_ORIF_EXPR:
9621 if (if_true_label == 0)
9622 if_true_label = drop_through_label = gen_label_rtx ();
9623 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9624 start_cleanup_deferral ();
9625 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9626 end_cleanup_deferral ();
9631 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9632 preserve_temp_slots (NULL_RTX);
9636 do_pending_stack_adjust ();
9637 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9643 case ARRAY_RANGE_REF:
9645 HOST_WIDE_INT bitsize, bitpos;
9647 enum machine_mode mode;
9651 unsigned int alignment;
9653 /* Get description of this reference. We don't actually care
9654 about the underlying object here. */
9655 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9656 &unsignedp, &volatilep, &alignment);
9658 type = type_for_size (bitsize, unsignedp);
9659 if (! SLOW_BYTE_ACCESS
9660 && type != 0 && bitsize >= 0
9661 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9662 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9663 != CODE_FOR_nothing))
9665 do_jump (convert (type, exp), if_false_label, if_true_label);
9672 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9673 if (integer_onep (TREE_OPERAND (exp, 1))
9674 && integer_zerop (TREE_OPERAND (exp, 2)))
9675 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9677 else if (integer_zerop (TREE_OPERAND (exp, 1))
9678 && integer_onep (TREE_OPERAND (exp, 2)))
9679 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9683 rtx label1 = gen_label_rtx ();
9684 drop_through_label = gen_label_rtx ();
9686 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9688 start_cleanup_deferral ();
9689 /* Now the THEN-expression. */
9690 do_jump (TREE_OPERAND (exp, 1),
9691 if_false_label ? if_false_label : drop_through_label,
9692 if_true_label ? if_true_label : drop_through_label);
9693 /* In case the do_jump just above never jumps. */
9694 do_pending_stack_adjust ();
9695 emit_label (label1);
9697 /* Now the ELSE-expression. */
9698 do_jump (TREE_OPERAND (exp, 2),
9699 if_false_label ? if_false_label : drop_through_label,
9700 if_true_label ? if_true_label : drop_through_label);
9701 end_cleanup_deferral ();
9707 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9709 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9710 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9712 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9713 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9716 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9717 fold (build (EQ_EXPR, TREE_TYPE (exp),
9718 fold (build1 (REALPART_EXPR,
9719 TREE_TYPE (inner_type),
9721 fold (build1 (REALPART_EXPR,
9722 TREE_TYPE (inner_type),
9724 fold (build (EQ_EXPR, TREE_TYPE (exp),
9725 fold (build1 (IMAGPART_EXPR,
9726 TREE_TYPE (inner_type),
9728 fold (build1 (IMAGPART_EXPR,
9729 TREE_TYPE (inner_type),
9731 if_false_label, if_true_label);
9734 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9735 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9737 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9738 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9739 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9741 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9747 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9749 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9750 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9752 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9753 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9756 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9757 fold (build (NE_EXPR, TREE_TYPE (exp),
9758 fold (build1 (REALPART_EXPR,
9759 TREE_TYPE (inner_type),
9761 fold (build1 (REALPART_EXPR,
9762 TREE_TYPE (inner_type),
9764 fold (build (NE_EXPR, TREE_TYPE (exp),
9765 fold (build1 (IMAGPART_EXPR,
9766 TREE_TYPE (inner_type),
9768 fold (build1 (IMAGPART_EXPR,
9769 TREE_TYPE (inner_type),
9771 if_false_label, if_true_label);
9774 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9775 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9777 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9778 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9779 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9781 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9786 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9787 if (GET_MODE_CLASS (mode) == MODE_INT
9788 && ! can_compare_p (LT, mode, ccp_jump))
9789 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9791 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9795 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9796 if (GET_MODE_CLASS (mode) == MODE_INT
9797 && ! can_compare_p (LE, mode, ccp_jump))
9798 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9800 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9804 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9805 if (GET_MODE_CLASS (mode) == MODE_INT
9806 && ! can_compare_p (GT, mode, ccp_jump))
9807 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9809 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9813 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9814 if (GET_MODE_CLASS (mode) == MODE_INT
9815 && ! can_compare_p (GE, mode, ccp_jump))
9816 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9818 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9821 case UNORDERED_EXPR:
9824 enum rtx_code cmp, rcmp;
9827 if (code == UNORDERED_EXPR)
9828 cmp = UNORDERED, rcmp = ORDERED;
9830 cmp = ORDERED, rcmp = UNORDERED;
9831 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9834 if (! can_compare_p (cmp, mode, ccp_jump)
9835 && (can_compare_p (rcmp, mode, ccp_jump)
9836 /* If the target doesn't provide either UNORDERED or ORDERED
9837 comparisons, canonicalize on UNORDERED for the library. */
9838 || rcmp == UNORDERED))
9842 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9844 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9849 enum rtx_code rcode1;
9850 enum tree_code tcode2;
9874 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9875 if (can_compare_p (rcode1, mode, ccp_jump))
9876 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9880 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9881 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9884 /* If the target doesn't support combined unordered
9885 compares, decompose into UNORDERED + comparison. */
9886 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9887 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9888 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9889 do_jump (exp, if_false_label, if_true_label);
9895 /* Special case: __builtin_expect (<test>, 0) and
9896 __builtin_expect (<test>, 1)
9898 We need to do this here, so that <test> is not converted to a SCC
9899 operation on machines that use condition code registers and COMPARE
9900 like the PowerPC, and then the jump is done based on whether the SCC
9901 operation produced a 1 or 0. */
9903 /* Check for a built-in function. */
9904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9906 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9907 tree arglist = TREE_OPERAND (exp, 1);
9909 if (TREE_CODE (fndecl) == FUNCTION_DECL
9910 && DECL_BUILT_IN (fndecl)
9911 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9912 && arglist != NULL_TREE
9913 && TREE_CHAIN (arglist) != NULL_TREE)
9915 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9918 if (seq != NULL_RTX)
9925 /* fall through and generate the normal code. */
9929 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9931 /* This is not needed any more and causes poor code since it causes
9932 comparisons and tests from non-SI objects to have different code sequences. */
9934 /* Copy to register to avoid generating bad insns by cse
9935 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9936 if (!cse_not_expected && GET_CODE (temp) == MEM)
9937 temp = copy_to_reg (temp);
9939 do_pending_stack_adjust ();
9940 /* Do any postincrements in the expression that was tested. */
9943 if (GET_CODE (temp) == CONST_INT
9944 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9945 || GET_CODE (temp) == LABEL_REF)
9947 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9951 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9952 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9953 /* Note swapping the labels gives us not-equal. */
9954 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9955 else if (GET_MODE (temp) != VOIDmode)
9956 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9957 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9958 GET_MODE (temp), NULL_RTX, 0,
9959 if_false_label, if_true_label);
9964 if (drop_through_label)
9966 /* If do_jump produces code that might be jumped around,
9967 do any stack adjusts from that code, before the place
9968 where control merges in. */
9969 do_pending_stack_adjust ();
9970 emit_label (drop_through_label);
9974 /* Given a comparison expression EXP for values too wide to be compared
9975 with one insn, test the comparison and jump to the appropriate label.
9976 The code of EXP is ignored; we always test GT if SWAP is 0,
9977 and LT if SWAP is 1. */
9980 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9983 rtx if_false_label, if_true_label;
9985 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9986 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9987 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9988 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9990 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9993 /* Compare OP0 with OP1, word at a time, in mode MODE.
9994 UNSIGNEDP says to do unsigned comparison.
9995 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9998 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9999 enum machine_mode mode;
10002 rtx if_false_label, if_true_label;
10004 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10005 rtx drop_through_label = 0;
10008 if (! if_true_label || ! if_false_label)
10009 drop_through_label = gen_label_rtx ();
10010 if (! if_true_label)
10011 if_true_label = drop_through_label;
10012 if (! if_false_label)
10013 if_false_label = drop_through_label;
10015 /* Compare a word at a time, high order first. */
10016 for (i = 0; i < nwords; i++)
10018 rtx op0_word, op1_word;
10020 if (WORDS_BIG_ENDIAN)
10022 op0_word = operand_subword_force (op0, i, mode);
10023 op1_word = operand_subword_force (op1, i, mode);
10027 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10028 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10031 /* All but high-order word must be compared as unsigned. */
10032 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10033 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10034 NULL_RTX, if_true_label);
10036 /* Consider lower words only if these are equal. */
10037 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10038 NULL_RTX, 0, NULL_RTX, if_false_label);
10041 if (if_false_label)
10042 emit_jump (if_false_label);
10043 if (drop_through_label)
10044 emit_label (drop_through_label);
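/* Illustrative sketch: comparing two hypothetical 64-bit values on a
   32-bit target proceeds one word at a time, high-order word first:

     if (a_hi > b_hi) goto if_true_label;    (signedness of the type)
     if (a_hi != b_hi) goto if_false_label;
     if (a_lo > b_lo) goto if_true_label;    (always unsigned)
     goto if_false_label;  */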
10047 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10048 with one insn, test the comparison and jump to the appropriate label. */
10051 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10053 rtx if_false_label, if_true_label;
10055 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10056 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10057 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10058 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10060 rtx drop_through_label = 0;
10062 if (! if_false_label)
10063 drop_through_label = if_false_label = gen_label_rtx ();
10065 for (i = 0; i < nwords; i++)
10066 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10067 operand_subword_force (op1, i, mode),
10068 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10069 word_mode, NULL_RTX, 0, if_false_label,
10073 emit_jump (if_true_label);
10074 if (drop_through_label)
10075 emit_label (drop_through_label);
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
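/* Editorial note: a minimal sketch (not part of the original file) of
   the "or" reduction above.  A multiword value is zero exactly when
   the inclusive-or of all its words is zero, so a single compare
   against zero suffices.  */
#if 0
static int
example_two_word_is_zero (unsigned long a_hi, unsigned long a_lo)
{
  return (a_hi | a_lo) == 0;    /* one IOR, one compare */
}
#endif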
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
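/* Editorial note: a minimal sketch (not part of the original file) of
   the constant adjustment in the disabled block above.  For an 8-bit
   operand mode, the signed test x == -1 can be done as an unsigned
   test once the constant is masked to the mode, since -1 looks like
   0xff when zero-extended.  */
#if 0
static int
example_signed_eq_done_unsigned (signed char x)
{
  /* INTVAL (op1) & GET_MODE_MASK (QImode) turns -1 into 0xff.  */
  return (unsigned char) x == (-1 & 0xff);
}
#endif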
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
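/* Editorial note: a minimal sketch (not part of the original file) of
   the reversal above.  With only a false label, "fall through if
   a < b, else jump" becomes "jump if a >= b".  This is unsafe for
   floating point, where a < b and a >= b are both false if either
   operand is a NaN; hence the ! FLOAT_MODE_P check.  */
#if 0
static void
example_reversed_branch (int a, int b, void (*false_label) (void))
{
  if (a >= b)                   /* reverse_condition (LT) == GE */
    false_label ();
}
#endif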
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
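/* Editorial note: a minimal sketch (not part of the original file) of
   why the signedness of the comparison type selects between
   SIGNED_CODE and UNSIGNED_CODE.  The same bit pattern orders
   differently under the two interpretations.  */
#if 0
static void
example_signedness_choice (void)
{
  int sa = -128;                          /* low bits 0x...80 */
  unsigned int ua = (unsigned int) -128;  /* same bits, read unsigned */
  int lt = sa < 127;                      /* 1: signed compare (LT) */
  int ltu = ua < 127u;                    /* 0: unsigned compare (LTU) */
  (void) lt; (void) ltu;
}
#endif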
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
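#if 0
  /* Editorial note: a minimal sketch (not part of the original file)
     of the single-bit rewrite above.  For x with bit 3 tested,
     (x & 8) != 0 is computed as (x >> 3) & 1, and the EQ form is the
     same value xored with 1.  */
  {
    unsigned int x = 10;                /* binary 1010: bit 3 is set */
    unsigned int ne = (x >> 3) & 1;     /* (x & 8) != 0  ->  1 */
    unsigned int eq = ne ^ 1;           /* (x & 8) == 0  ->  0 */
    (void) ne; (void) eq;
  }
#endif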
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
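/* Editorial note: a minimal sketch (not part of the original file) of
   the set/compare/jump/set fallback above, for INVERT == 0.  The
   target is preset to 1 and overwritten with 0 on the path where the
   conditional branch is not taken.  */
#if 0
static int
example_set_jump_set (int a, int b)
{
  int target = 1;               /* emit_move_insn (target, const1_rtx) */
  if (a < b)                    /* (*bcc_gen_fctn[LT]) (label) */
    goto label;
  target = 0;                   /* emit_move_insn (target, const0_rtx) */
 label:
  return target;
}
#endif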
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
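/* Editorial note: a minimal sketch (not part of the original file) of
   the bounds check a casesi pattern performs.  Subtracting MINVAL and
   doing one unsigned compare against RANGE tests both ends of the
   case range at once, since values below MINVAL wrap around to large
   unsigned numbers.  */
#if 0
static void
example_casesi (long index, long minval, unsigned long range,
                void (*table[]) (void), void (*default_label) (void))
{
  unsigned long i = (unsigned long) (index - minval);
  if (i > range)
    default_label ();           /* out of range on either end */
  else
    table[i] ();                /* dispatch through the table */
}
#endif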
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
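/* Editorial note: a minimal sketch (not part of the original file) of
   the address arithmetic built above.  Each table entry occupies
   GET_MODE_SIZE (CASE_VECTOR_MODE) bytes, so the dispatch slot lives
   at TABLE_LABEL + INDEX * entry_size; the entry is loaded from there
   and jumped through.  */
#if 0
static char *
example_table_slot (char *table_label, unsigned long index, int entry_size)
{
  return table_label + index * entry_size;  /* PLUS (MULT (...), LABEL_REF) */
}
#endif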
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}