/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#define STACK_PUSH_CODE PRE_INC

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */

int (*lang_safe_from_p) PARAMS ((rtx, tree));
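
/* An illustrative installation of the hook (hypothetical: the tree code
   LANG_SPECIAL_EXPR, its accessor and this function are invented for the
   example).  Only language-specific operands need checking, since
   safe_from_p itself walks the ordinary TREE_OPERANDs, and recursive
   calls must pass 0 for TOP_P.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  if (TREE_CODE (exp) == LANG_SPECIAL_EXPR)
    return safe_from_p (x, LANG_SPECIAL_OPERAND (exp), 0);
  return 1;
}

/* In the front end's initialization:  */
  lang_safe_from_p = example_lang_safe_from_p;
#endif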
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));

extern struct obstack permanent_obstack;

static rtx get_push_address PARAMS ((int));
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
        PARAMS ((unsigned HOST_WIDE_INT,
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int,
                                HOST_WIDE_INT, int));
static enum memory_use_mode
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
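
/* For example, with the default MOVE_RATIO of 15 above (3 under -Os), a
   16-byte word-aligned copy on a 32-bit target takes four SImode moves,
   so MOVE_BY_PIECES_P is 4 < 15 and the copy is expanded inline; under
   -Os the 4 < 3 test fails and a movstr pattern or a library call is
   used instead.  */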
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))

            reg = gen_rtx_REG (mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
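
/* An illustrative sketch of the queue discipline (hypothetical rtx
   variables X_RTX and Y_RTX; this block is not part of the compiler):
   expanding something like "y = x++" enqueues the increment, reads the
   pre-increment value through protect_from_queue, and flushes the queue
   at the end of the statement.  */
#if 0
  {
    rtx q = enqueue_insn (x_rtx, gen_add2_insn (x_rtx, const1_rtx));
    emit_move_insn (y_rtx, protect_from_queue (q, 0));
    emit_queue ();
  }
#endif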
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be

  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)

      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)

      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
          emit_unop_insn (code, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);

          libcall = extendsfdf2_libfunc;
          libcall = extendsfxf2_libfunc;
          libcall = extendsftf2_libfunc;
          libcall = truncdfsf2_libfunc;
          libcall = extenddfxf2_libfunc;
          libcall = extenddftf2_libfunc;
          libcall = truncxfsf2_libfunc;
          libcall = truncxfdf2_libfunc;
          libcall = trunctfsf2_libfunc;
          libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */

  value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
  insns = get_insns ();

  emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))

          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;

          && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
          && STORE_FLAG_VALUE == -1)

          emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
          fill_value = gen_reg_rtx (word_mode);
          emit_insn (gen_slt (fill_value));

            = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                            size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
          fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
          from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
          emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
          emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
          from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
          emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
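
/* Illustrative usage of convert_move (hypothetical pseudos; a sketch,
   not part of the compiler): widen a QImode value into an SImode
   register with zero extension, then truncate it back.  convert_move
   picks an extend/trunc pattern, a shift sequence, or a plain lowpart
   move as the target allows.  */
#if 0
  {
    rtx byte = gen_reg_rtx (QImode);
    rtx word = gen_reg_rtx (SImode);

    convert_move (word, byte, 1);       /* zero extension */
    convert_move (byte, word, 0);       /* truncation */
  }
#endif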
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
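
  /* Worked example (assuming a 32-bit HOST_WIDE_INT): converting the
     unsigned SImode constant (const_int -1) to DImode must yield
     0x00000000ffffffff; gen_lowpart would make the high-order word all
     ones, while immed_double_const with an explicit zero high word
     below gives the intended value.  */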
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (trunc_int_for_mode (val, mode));

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);

/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);

#ifdef STACK_GROWS_DOWNWARD
      data.to_addr = to_addr;

    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
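
/* For instance, a 7-byte word-aligned copy on a 32-bit target comes out
   as one SImode, one HImode and one QImode move: each pass of the loop
   above handles what it can in the current mode and leaves the remainder
   (7 = 4 + 2 + 1) to the narrower modes.  */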
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
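
/* E.g. for L = 10 with word alignment on a 32-bit target: 10 / 4 = 2
   SImode moves leaving l = 2, then one HImode move, so n_insns = 3.  */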
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

      if (data->autinc_to)
          to1 = replace_equiv_address (data->to, data->to_addr);
          to1 = adjust_address (to1, mode, 0);
        to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
          from1 = replace_equiv_address (data->from, data->from_addr);
          from1 = adjust_address (from1, mode, 0);
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)

   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,

emit_block_move (x, y, size)

#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;

  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))

              rtx last = get_last_insn ();

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);

              delete_insns_since (last);

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be hoisted
  if (RTX_UNCHANGING_P (x))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
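
/* Illustrative caller of emit_block_move (DST_ADDR and SRC_ADDR are
   hypothetical pseudo registers holding addresses; a sketch only):
   copy a 16-byte BLKmode object.  Small constant sizes go through
   move_by_pieces; larger or variable sizes use a movstr pattern or the
   memcpy/bcopy call emitted above.  */
#if 0
  {
    rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
    rtx src = gen_rtx_MEM (BLKmode, src_addr);

    emit_block_move (dst, src, GEN_INT (16));
  }
#endif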
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING

emit_group_load (dst, orig_src, ssize)

  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
          else if (bytepos == 0)
              rtx mem = assign_stack_temp (GET_MODE (src),
                                           GET_MODE_SIZE (GET_MODE (src)), 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
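
/* Shape of DST for emit_group_load (a made-up two-register example):
   a 16-byte value whose first 8 bytes live in fr4 and next 8 in fr5
   might be described as

     (parallel [(expr_list (reg:DF fr4) (const_int 0))
                (expr_list (reg:DF fr5) (const_int 8))])

   where each CONST_INT gives the byte position of that piece within
   the source block.  */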
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

emit_group_store (orig_dst, src, ssize)

  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
  else if (GET_CODE (dst) != MEM)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          if (BYTES_BIG_ENDIAN)
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
          bytelen = ssize - bytepos;

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)

  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
                                                  | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
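
  /* For example, a 3-byte structure on a 32-bit big-endian machine gets
     big_endian_correction = 32 - 3 * 8 = 8, so the first extraction
     below starts 8 bits into the high-order source word.  */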
  /* Copy the structure BITSIZE bits at a time.
     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (call_fusage, reg)
     rtx *call_fusage, reg;

  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)

  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (call_fusage, regs)

  for (i = 0; i < XVECLEN (regs, 0); i++)
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
2292 can_store_by_pieces (len, constfun, constfundata, align)
2293 unsigned HOST_WIDE_INT len;
2294 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2298 unsigned HOST_WIDE_INT max_size, l;
2299 HOST_WIDE_INT offset = 0;
2300 enum machine_mode mode, tmode;
2301 enum insn_code icode;
2305 if (! MOVE_BY_PIECES_P (len, align))
2308 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2309 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2310 align = MOVE_MAX * BITS_PER_UNIT;
2312 /* We would first store what we can in the largest integer mode, then go to
2313 successively smaller modes. */
2316 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2321 max_size = MOVE_MAX_PIECES + 1;
2322 while (max_size > 1)
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2325 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2326 if (GET_MODE_SIZE (tmode) < max_size)
2329 if (mode == VOIDmode)
2332 icode = mov_optab->handlers[(int) mode].insn_code;
2333 if (icode != CODE_FOR_nothing
2334 && align >= GET_MODE_ALIGNMENT (mode))
2336 unsigned int size = GET_MODE_SIZE (mode);
2343 cst = (*constfun) (constfundata, offset, mode);
2344 if (!LEGITIMATE_CONSTANT_P (cst))
2354 max_size = GET_MODE_SIZE (mode);
2357 /* The code above should have handled everything. */
2365 /* Generate several move instructions to store LEN bytes generated by
2366 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2367 pointer which will be passed as argument in every CONSTFUN call.
2368 ALIGN is maximum alignment we can assume. */
2371 store_by_pieces (to, len, constfun, constfundata, align)
2373 unsigned HOST_WIDE_INT len;
2374 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2378 struct store_by_pieces data;
2380 if (! MOVE_BY_PIECES_P (len, align))
2382 to = protect_from_queue (to, 1);
2383 data.constfun = constfun;
2384 data.constfundata = constfundata;
2387 store_by_pieces_1 (&data, align);
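#if 0
/* A minimal sketch of a CONSTFUN callback for store_by_pieces, assuming
   the constant bytes live in a host buffer handed in as CONSTFUNDATA.
   It assembles each piece in host little-endian order and only handles
   modes no wider than HOST_WIDE_INT; real callers (such as the string
   built-ins) must also honor target byte order and wider modes.  */
static rtx
example_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  const unsigned char *buf = (const unsigned char *) data;
  HOST_WIDE_INT c = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c |= (HOST_WIDE_INT) buf[offset + i] << (i * BITS_PER_UNIT);

  return GEN_INT (trunc_int_for_mode (c, mode));
}

/* A caller would first check can_store_by_pieces (len, example_constfun,
   buf, align) and, if that succeeds, emit the stores with
   store_by_pieces (to, len, example_constfun, buf, align).  */
#endif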
2390 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2391 rtx with BLKmode). The caller must pass TO through protect_from_queue
2392 before calling. ALIGN is maximum alignment we can assume. */
2395 clear_by_pieces (to, len, align)
2397 unsigned HOST_WIDE_INT len;
2400 struct store_by_pieces data;
2402 data.constfun = clear_by_pieces_1;
2403 data.constfundata = NULL;
2406 store_by_pieces_1 (&data, align);
2409 /* Callback routine for clear_by_pieces.
2410 Return const0_rtx unconditionally. */
2413 clear_by_pieces_1 (data, offset, mode)
2414 PTR data ATTRIBUTE_UNUSED;
2415 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2416 enum machine_mode mode ATTRIBUTE_UNUSED;
2421 /* Subroutine of clear_by_pieces and store_by_pieces.
2422 Generate several move instructions to store LEN bytes of block TO. (A MEM
2423 rtx with BLKmode). The caller must pass TO through protect_from_queue
2424 before calling. ALIGN is maximum alignment we can assume. */
2427 store_by_pieces_1 (data, align)
2428 struct store_by_pieces *data;
2431 rtx to_addr = XEXP (data->to, 0);
2432 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2433 enum machine_mode mode = VOIDmode, tmode;
2434 enum insn_code icode;
2437 data->to_addr = to_addr;
2439 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2440 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2442 data->explicit_inc_to = 0;
2444 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2446 data->offset = data->len;
2448 /* If storing requires more than two move insns,
2449 copy addresses to registers (to make displacements shorter)
2450 and use post-increment if available. */
2451 if (!data->autinc_to
2452 && move_by_pieces_ninsns (data->len, align) > 2)
2454 /* Determine the main mode we'll be using. */
2455 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2456 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2457 if (GET_MODE_SIZE (tmode) < max_size)
2460 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2462 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2463 data->autinc_to = 1;
2464 data->explicit_inc_to = -1;
2467 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2468 && ! data->autinc_to)
2470 data->to_addr = copy_addr_to_reg (to_addr);
2471 data->autinc_to = 1;
2472 data->explicit_inc_to = 1;
2475 if ( !data->autinc_to && CONSTANT_P (to_addr))
2476 data->to_addr = copy_addr_to_reg (to_addr);
2479 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2480 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2481 align = MOVE_MAX * BITS_PER_UNIT;
2483 /* First store what we can in the largest integer mode, then go to
2484 successively smaller modes. */
2486 while (max_size > 1)
2488 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2489 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2490 if (GET_MODE_SIZE (tmode) < max_size)
2493 if (mode == VOIDmode)
2496 icode = mov_optab->handlers[(int) mode].insn_code;
2497 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2498 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2500 max_size = GET_MODE_SIZE (mode);
2503 /* The code above should have handled everything. */
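/* A worked example of the two routines above: storing 11 bytes on a
   32-bit target with word alignment and MOVE_MAX_PIECES == 4.  The mode
   walk first selects SImode and store_by_pieces_2 emits two 4-byte
   stores; max_size then shrinks so HImode covers the next 2 bytes, and
   QImode the final byte: 4 + 4 + 2 + 1 = 11.  */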
2508 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2509 with move instructions for mode MODE. GENFUN is the gen_... function
2510 to make a move insn for that mode. DATA has all the other info. */
2513 store_by_pieces_2 (genfun, mode, data)
2514 rtx (*genfun) PARAMS ((rtx, ...));
2515 enum machine_mode mode;
2516 struct store_by_pieces *data;
2518 unsigned int size = GET_MODE_SIZE (mode);
2521 while (data->len >= size)
2524 data->offset -= size;
2526 if (data->autinc_to)
2528 to1 = replace_equiv_address (data->to, data->to_addr);
2529 to1 = adjust_address (to1, mode, 0);
2532 to1 = adjust_address (data->to, mode, data->offset);
2534 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2535 emit_insn (gen_add2_insn (data->to_addr,
2536 GEN_INT (-(HOST_WIDE_INT) size)));
2538 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2539 emit_insn ((*genfun) (to1, cst));
2541 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2542 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2544 if (! data->reverse)
2545 data->offset += size;
2551 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2552 its length in bytes. */
2555 clear_storage (object, size)
2559 #ifdef TARGET_MEM_FUNCTIONS
2561 tree call_expr, arg_list;
2564 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2565 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2567 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2568 just move a zero. Otherwise, do this a piece at a time. */
2569 if (GET_MODE (object) != BLKmode
2570 && GET_CODE (size) == CONST_INT
2571 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2572 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2575 object = protect_from_queue (object, 1);
2576 size = protect_from_queue (size, 0);
2578 if (GET_CODE (size) == CONST_INT
2579 && MOVE_BY_PIECES_P (INTVAL (size), align))
2580 clear_by_pieces (object, INTVAL (size), align);
2583 /* Try the most limited insn first, because there's no point
2584 including more than one in the machine description unless
2585 the more limited one has some advantage. */
2587 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2588 enum machine_mode mode;
2590 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2591 mode = GET_MODE_WIDER_MODE (mode))
2593 enum insn_code code = clrstr_optab[(int) mode];
2594 insn_operand_predicate_fn pred;
2596 if (code != CODE_FOR_nothing
2597 /* We don't need MODE to be narrower than
2598 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2599 the mode mask, as it is returned by the macro, it will
2600 definitely be less than the actual mode mask. */
2601 && ((GET_CODE (size) == CONST_INT
2602 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2603 <= (GET_MODE_MASK (mode) >> 1)))
2604 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2605 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2606 || (*pred) (object, BLKmode))
2607 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2608 || (*pred) (opalign, VOIDmode)))
2611 rtx last = get_last_insn ();
2614 op1 = convert_to_mode (mode, size, 1);
2615 pred = insn_data[(int) code].operand[1].predicate;
2616 if (pred != 0 && ! (*pred) (op1, mode))
2617 op1 = copy_to_mode_reg (mode, op1);
2619 pat = GEN_FCN ((int) code) (object, op1, opalign);
2626 delete_insns_since (last);
2630 /* OBJECT or SIZE may have been passed through protect_from_queue.
2632 It is unsafe to save the value generated by protect_from_queue
2633 and reuse it later. Consider what happens if emit_queue is
2634 called before the return value from protect_from_queue is used.
2636 Expansion of the CALL_EXPR below will call emit_queue before
2637 we are finished emitting RTL for argument setup. So if we are
2638 not careful we could get the wrong value for an argument.
2640 To avoid this problem we go ahead and emit code to copy OBJECT
2641 and SIZE into new pseudos. We can then place those new pseudos
2642 into an RTL_EXPR and use them later, even after a call to emit_queue.
2645 Note this is not strictly needed for library calls since they
2646 do not call emit_queue before loading their arguments. However,
2647 we may need to have library calls call emit_queue in the future
2648 since failing to do so could cause problems for targets which
2649 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652 #ifdef TARGET_MEM_FUNCTIONS
2653 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2655 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2656 TREE_UNSIGNED (integer_type_node));
2657 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2660 #ifdef TARGET_MEM_FUNCTIONS
2661 /* It is incorrect to use the libcall calling conventions to call
2662 memset in this context.
2664 This could be a user call to memset and the user may wish to
2665 examine the return value from memset.
2667 For targets where libcalls and normal calls have different
2668 conventions for returning pointers, we could end up generating incorrect code.
2671 So instead of using a libcall sequence we build up a suitable
2672 CALL_EXPR and expand the call in the normal fashion. */
2673 if (fn == NULL_TREE)
2677 /* This was copied from except.c; I don't know whether all of this is
2678 necessary in this context. */
2679 fn = get_identifier ("memset");
2680 fntype = build_pointer_type (void_type_node);
2681 fntype = build_function_type (fntype, NULL_TREE);
2682 fn = build_decl (FUNCTION_DECL, fn, fntype);
2683 ggc_add_tree_root (&fn, 1);
2684 DECL_EXTERNAL (fn) = 1;
2685 TREE_PUBLIC (fn) = 1;
2686 DECL_ARTIFICIAL (fn) = 1;
2687 TREE_NOTHROW (fn) = 1;
2688 make_decl_rtl (fn, NULL);
2689 assemble_external (fn);
2692 /* We need to make an argument list for the function call.
2694 memset has three arguments: the first is a void * address, the
2695 second an integer with the initialization value, and the last a
2696 size_t byte count for the copy. */
2698 = build_tree_list (NULL_TREE,
2699 make_tree (build_pointer_type (void_type_node),
2701 TREE_CHAIN (arg_list)
2702 = build_tree_list (NULL_TREE,
2703 make_tree (integer_type_node, const0_rtx));
2704 TREE_CHAIN (TREE_CHAIN (arg_list))
2705 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2706 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2708 /* Now we have to build up the CALL_EXPR itself. */
2709 call_expr = build1 (ADDR_EXPR,
2710 build_pointer_type (TREE_TYPE (fn)), fn);
2711 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2712 call_expr, arg_list, NULL_TREE);
2713 TREE_SIDE_EFFECTS (call_expr) = 1;
2715 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2717 emit_library_call (bzero_libfunc, LCT_NORMAL,
2718 VOIDmode, 2, object, Pmode, size,
2719 TYPE_MODE (integer_type_node));
2722 /* If we are initializing a readonly value, show the above call
2723 clobbered it. Otherwise, a load from it may erroneously be
2724 hoisted from a loop. */
2725 if (RTX_UNCHANGING_P (object))
2726 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
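#if 0
/* A minimal sketch of a typical clear_storage call: zeroing a 32-byte
   BLKmode stack temporary.  The size is purely illustrative.  */
static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (mem, GEN_INT (32));
}
#endif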
2733 /* Generate code to copy Y into X.
2734 Both Y and X must have the same mode, except that
2735 Y can be a constant with VOIDmode.
2736 This mode cannot be BLKmode; use emit_block_move for that.
2738 Return the last instruction emitted. */
2741 emit_move_insn (x, y)
2744 enum machine_mode mode = GET_MODE (x);
2745 rtx y_cst = NULL_RTX;
2748 x = protect_from_queue (x, 1);
2749 y = protect_from_queue (y, 0);
2751 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2754 /* Never force constant_p_rtx to memory. */
2755 if (GET_CODE (y) == CONSTANT_P_RTX)
2757 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2760 y = force_const_mem (mode, y);
2763 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2765 if (GET_CODE (x) == MEM
2766 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2767 && ! push_operand (x, GET_MODE (x)))
2769 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2770 x = validize_mem (x);
2772 if (GET_CODE (y) == MEM
2773 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2775 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2776 y = validize_mem (y);
2778 if (mode == BLKmode)
2781 last_insn = emit_move_insn_1 (x, y);
2783 if (y_cst && GET_CODE (x) == REG)
2784 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
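#if 0
/* A minimal sketch of emit_move_insn's contract: both operands share
   the mode (a constant may be VOIDmode), and BLKmode is rejected; block
   copies go through emit_block_move instead.  */
static void
example_moves ()
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));           /* constant -> pseudo */
  emit_move_insn (gen_reg_rtx (SImode), reg);   /* pseudo -> pseudo */
}
#endif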
2789 /* Low level part of emit_move_insn.
2790 Called just like emit_move_insn, but assumes X and Y
2791 are basically valid. */
2794 emit_move_insn_1 (x, y)
2797 enum machine_mode mode = GET_MODE (x);
2798 enum machine_mode submode;
2799 enum mode_class class = GET_MODE_CLASS (mode);
2802 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2805 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2807 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2809 /* Expand complex moves by moving real part and imag part, if possible. */
2810 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2811 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2813 (class == MODE_COMPLEX_INT
2814 ? MODE_INT : MODE_FLOAT),
2816 && (mov_optab->handlers[(int) submode].insn_code
2817 != CODE_FOR_nothing))
2819 /* Don't split destination if it is a stack push. */
2820 int stack = push_operand (x, GET_MODE (x));
2822 #ifdef PUSH_ROUNDING
2823 /* In case we output to the stack, but the size is smaller than what the
2824 machine can push exactly, we need to use move instructions. */
2826 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2829 int offset1, offset2;
2831 /* Do not use anti_adjust_stack, since we don't want to update
2832 stack_pointer_delta. */
2833 temp = expand_binop (Pmode,
2834 #ifdef STACK_GROWS_DOWNWARD
2841 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2845 if (temp != stack_pointer_rtx)
2846 emit_move_insn (stack_pointer_rtx, temp);
2847 #ifdef STACK_GROWS_DOWNWARD
2849 offset2 = GET_MODE_SIZE (submode);
2851 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2852 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2853 + GET_MODE_SIZE (submode));
2855 emit_move_insn (change_address (x, submode,
2856 gen_rtx_PLUS (Pmode,
2858 GEN_INT (offset1))),
2859 gen_realpart (submode, y));
2860 emit_move_insn (change_address (x, submode,
2861 gen_rtx_PLUS (Pmode,
2863 GEN_INT (offset2))),
2864 gen_imagpart (submode, y));
2868 /* If this is a stack push, push the highpart first, so it
2869 will be in the argument order.
2871 In that case, change_address is used only to convert
2872 the mode, not to change the address. */
2875 /* Note that the real part always precedes the imag part in memory
2876 regardless of the machine's endianness. */
2877 #ifdef STACK_GROWS_DOWNWARD
2878 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2879 (gen_rtx_MEM (submode, XEXP (x, 0)),
2880 gen_imagpart (submode, y)));
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_realpart (submode, y)));
2885 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2886 (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y)));
2888 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2889 (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_imagpart (submode, y)));
2895 rtx realpart_x, realpart_y;
2896 rtx imagpart_x, imagpart_y;
2898 /* If this is a complex value with each part being smaller than a
2899 word, the usual calling sequence will likely pack the pieces into
2900 a single register. Unfortunately, SUBREG of hard registers only
2901 deals in terms of words, so we have a problem converting input
2902 arguments to the CONCAT of two registers that is used elsewhere
2903 for complex values. If this is before reload, we can copy it into
2904 memory and reload. FIXME, we should see about using extract and
2905 insert on integer registers, but complex short and complex char
2906 variables should be rarely used. */
2907 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2908 && (reload_in_progress | reload_completed) == 0)
2910 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2911 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2913 if (packed_dest_p || packed_src_p)
2915 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916 ? MODE_FLOAT : MODE_INT);
2918 enum machine_mode reg_mode
2919 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2921 if (reg_mode != BLKmode)
2923 rtx mem = assign_stack_temp (reg_mode,
2924 GET_MODE_SIZE (mode), 0);
2925 rtx cmem = adjust_address (mem, mode, 0);
2928 = N_("function using short complex types cannot be inline");
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2933 emit_move_insn_1 (cmem, y);
2934 return emit_move_insn_1 (sreg, mem);
2938 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2939 emit_move_insn_1 (mem, sreg);
2940 return emit_move_insn_1 (x, cmem);
2946 realpart_x = gen_realpart (submode, x);
2947 realpart_y = gen_realpart (submode, y);
2948 imagpart_x = gen_imagpart (submode, x);
2949 imagpart_y = gen_imagpart (submode, y);
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2956 && ! (reload_in_progress || reload_completed)
2957 && (GET_CODE (realpart_x) == SUBREG
2958 || GET_CODE (imagpart_x) == SUBREG))
2960 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (realpart_x, realpart_y));
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (imagpart_x, imagpart_y));
2969 return get_last_insn ();
2972 /* This will handle any multi-word mode that lacks a move_insn pattern.
2973 However, you will get better code if you define such patterns,
2974 even if they must turn into multiple assembler instructions. */
2975 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2981 #ifdef PUSH_ROUNDING
2983 /* If X is a push on the stack, do the push now and replace
2984 X with a reference to the stack pointer. */
2985 if (push_operand (x, GET_MODE (x)))
2990 /* Do not use anti_adjust_stack, since we don't want to update
2991 stack_pointer_delta. */
2992 temp = expand_binop (Pmode,
2993 #ifdef STACK_GROWS_DOWNWARD
3000 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
3004 if (temp != stack_pointer_rtx)
3005 emit_move_insn (stack_pointer_rtx, temp);
3007 code = GET_CODE (XEXP (x, 0));
3008 /* Just hope that small offsets off SP are OK. */
3009 if (code == POST_INC)
3010 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3011 GEN_INT (-(HOST_WIDE_INT)
3012 GET_MODE_SIZE (GET_MODE (x))));
3013 else if (code == POST_DEC)
3014 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3015 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3017 temp = stack_pointer_rtx;
3019 x = change_address (x, VOIDmode, temp);
3023 /* If we are in reload, see if either operand is a MEM whose address
3024 is scheduled for replacement. */
3025 if (reload_in_progress && GET_CODE (x) == MEM
3026 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3027 x = replace_equiv_address_nv (x, inner);
3028 if (reload_in_progress && GET_CODE (y) == MEM
3029 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3030 y = replace_equiv_address_nv (y, inner);
3036 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3039 rtx xpart = operand_subword (x, i, 1, mode);
3040 rtx ypart = operand_subword (y, i, 1, mode);
3042 /* If we can't get a part of Y, put Y into memory if it is a
3043 constant. Otherwise, force it into a register. If we still
3044 can't get a part of Y, abort. */
3045 if (ypart == 0 && CONSTANT_P (y))
3047 y = force_const_mem (mode, y);
3048 ypart = operand_subword (y, i, 1, mode);
3050 else if (ypart == 0)
3051 ypart = operand_subword_force (y, i, mode);
3053 if (xpart == 0 || ypart == 0)
3056 need_clobber |= (GET_CODE (xpart) == SUBREG);
3058 last_insn = emit_move_insn (xpart, ypart);
3061 seq = gen_sequence ();
3064 /* Show the output dies here. This is necessary for SUBREGs
3065 of pseudos since we cannot track their lifetimes correctly;
3066 hard regs shouldn't appear here except as return values.
3067 We never want to emit such a clobber after reload. */
3069 && ! (reload_in_progress || reload_completed)
3070 && need_clobber != 0)
3072 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
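/* For example, moving a DImode value on a 32-bit target with no movdi
   pattern falls into the multi-word case above: the loop emits two
   SImode subword moves inside a sequence, preceded by a CLOBBER of the
   destination when any subword is a SUBREG of a pseudo.  */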
3083 /* Pushing data onto the stack. */
3085 /* Push a block of length SIZE (perhaps variable)
3086 and return an rtx to address the beginning of the block.
3087 Note that it is not possible for the value returned to be a QUEUED.
3088 The value may be virtual_outgoing_args_rtx.
3090 EXTRA is the number of bytes of padding to push in addition to SIZE.
3091 BELOW nonzero means this padding comes at low addresses;
3092 otherwise, the padding comes at high addresses. */
3095 push_block (size, extra, below)
3101 size = convert_modes (Pmode, ptr_mode, size, 1);
3102 if (CONSTANT_P (size))
3103 anti_adjust_stack (plus_constant (size, extra));
3104 else if (GET_CODE (size) == REG && extra == 0)
3105 anti_adjust_stack (size);
3108 temp = copy_to_mode_reg (Pmode, size);
3110 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111 temp, 0, OPTAB_LIB_WIDEN);
3112 anti_adjust_stack (temp);
3115 #ifndef STACK_GROWS_DOWNWARD
3121 temp = virtual_outgoing_args_rtx;
3122 if (extra != 0 && below)
3123 temp = plus_constant (temp, extra);
3127 if (GET_CODE (size) == CONST_INT)
3128 temp = plus_constant (virtual_outgoing_args_rtx,
3129 -INTVAL (size) - (below ? 0 : extra));
3130 else if (extra != 0 && !below)
3131 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132 negate_rtx (Pmode, plus_constant (size, extra)));
3134 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135 negate_rtx (Pmode, size));
3138 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
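/* For example, on a downward-growing stack with constant SIZE == 16,
   EXTRA == 8 and BELOW nonzero, the stack is adjusted by 24 bytes and
   the address returned is virtual_outgoing_args_rtx - 16, leaving the
   8 bytes of padding below the block.  */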
3142 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3143 block of SIZE bytes. */
3146 get_push_address (size)
3151 if (STACK_PUSH_CODE == POST_DEC)
3152 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3153 else if (STACK_PUSH_CODE == POST_INC)
3154 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3156 temp = stack_pointer_rtx;
3158 return copy_to_reg (temp);
3161 #ifdef PUSH_ROUNDING
3163 /* Emit single push insn. */
3166 emit_single_push_insn (mode, x, type)
3168 enum machine_mode mode;
3172 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3174 enum insn_code icode;
3175 insn_operand_predicate_fn pred;
3177 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3178 /* If there is a push pattern, use it. Otherwise try the old way of handing
3179 a MEM representing the push operation to the move expander. */
3180 icode = push_optab->handlers[(int) mode].insn_code;
3181 if (icode != CODE_FOR_nothing)
3183 if (((pred = insn_data[(int) icode].operand[0].predicate)
3184 && !((*pred) (x, mode))))
3185 x = force_reg (mode, x);
3186 emit_insn (GEN_FCN (icode) (x));
3189 if (GET_MODE_SIZE (mode) == rounded_size)
3190 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3193 #ifdef STACK_GROWS_DOWNWARD
3194 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3195 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3197 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3198 GEN_INT (rounded_size));
3200 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3203 dest = gen_rtx_MEM (mode, dest_addr);
3207 set_mem_attributes (dest, type, 1);
3208 /* Function incoming arguments may overlap with sibling call
3209 outgoing arguments and we cannot allow reordering of reads
3210 from function arguments with stores to outgoing arguments
3211 of sibling calls. */
3212 set_mem_alias_set (dest, 0);
3214 emit_move_insn (dest, x);
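/* For example, pushing an HImode value when PUSH_ROUNDING rounds 2 up
   to 4: GET_MODE_SIZE differs from rounded_size, so instead of a bare
   (pre_dec sp) address the push uses
       (pre_modify sp (plus sp -4))
   on a downward-growing stack.  */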
3218 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3220 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3222 SIZE is an rtx for the size of data to be copied (in bytes),
3223 needed only if X is BLKmode.
3225 ALIGN (in bits) is maximum alignment we can assume.
3227 If PARTIAL and REG are both nonzero, then copy that many of the first
3228 words of X into registers starting with REG, and push the rest of X.
3229 The amount of space pushed is decreased by PARTIAL words,
3230 rounded *down* to a multiple of PARM_BOUNDARY.
3231 REG must be a hard register in this case.
3232 If REG is zero but PARTIAL is not, take all other actions for an
3233 argument partially in registers, but do not actually load any registers.
3236 EXTRA is the amount in bytes of extra space to leave next to this arg.
3237 This is ignored if an argument block has already been allocated.
3239 On a machine that lacks real push insns, ARGS_ADDR is the address of
3240 the bottom of the argument block for this call. We use indexing off there
3241 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3242 argument block has not been preallocated.
3244 ARGS_SO_FAR is the size of args previously pushed for this call.
3246 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3247 for arguments passed in registers. If nonzero, it will be the number
3248 of bytes required. */
3251 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3252 args_addr, args_so_far, reg_parm_stack_space,
3255 enum machine_mode mode;
3264 int reg_parm_stack_space;
3268 enum direction stack_direction
3269 #ifdef STACK_GROWS_DOWNWARD
3275 /* Decide where to pad the argument: `downward' for below,
3276 `upward' for above, or `none' for don't pad it.
3277 Default is below for small data on big-endian machines; else above. */
3278 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3280 /* Invert direction if stack is post-decrement.
3282 if (STACK_PUSH_CODE == POST_DEC)
3283 if (where_pad != none)
3284 where_pad = (where_pad == downward ? upward : downward);
3286 xinner = x = protect_from_queue (x, 0);
3288 if (mode == BLKmode)
3290 /* Copy a block into the stack, entirely or partially. */
3293 int used = partial * UNITS_PER_WORD;
3294 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3302 /* USED is now the # of bytes we need not copy to the stack
3303 because registers will take care of them. */
3306 xinner = adjust_address (xinner, BLKmode, used);
3308 /* If the partial register-part of the arg counts in its stack size,
3309 skip the part of stack space corresponding to the registers.
3310 Otherwise, start copying to the beginning of the stack space,
3311 by setting SKIP to 0. */
3312 skip = (reg_parm_stack_space == 0) ? 0 : used;
3314 #ifdef PUSH_ROUNDING
3315 /* Do it with several push insns if that doesn't take lots of insns
3316 and if there is no difficulty with push insns that skip bytes
3317 on the stack for alignment purposes. */
3320 && GET_CODE (size) == CONST_INT
3322 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3323 /* Here we avoid the case of a structure whose weak alignment
3324 forces many pushes of a small amount of data,
3325 and such small pushes do rounding that causes trouble. */
3326 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3327 || align >= BIGGEST_ALIGNMENT
3328 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3329 == (align / BITS_PER_UNIT)))
3330 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3332 /* Push padding now if padding above and stack grows down,
3333 or if padding below and stack grows up.
3334 But if space already allocated, this has already been done. */
3335 if (extra && args_addr == 0
3336 && where_pad != none && where_pad != stack_direction)
3337 anti_adjust_stack (GEN_INT (extra));
3339 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3341 if (current_function_check_memory_usage && ! in_check_memory_usage)
3345 in_check_memory_usage = 1;
3346 temp = get_push_address (INTVAL (size) - used);
3347 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3348 emit_library_call (chkr_copy_bitmap_libfunc,
3349 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3350 Pmode, XEXP (xinner, 0), Pmode,
3351 GEN_INT (INTVAL (size) - used),
3352 TYPE_MODE (sizetype));
3354 emit_library_call (chkr_set_right_libfunc,
3355 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3356 Pmode, GEN_INT (INTVAL (size) - used),
3357 TYPE_MODE (sizetype),
3358 GEN_INT (MEMORY_USE_RW),
3359 TYPE_MODE (integer_type_node));
3360 in_check_memory_usage = 0;
3364 #endif /* PUSH_ROUNDING */
3368 /* Otherwise make space on the stack and copy the data
3369 to the address of that space. */
3371 /* Deduct words put into registers from the size we must copy. */
3374 if (GET_CODE (size) == CONST_INT)
3375 size = GEN_INT (INTVAL (size) - used);
3377 size = expand_binop (GET_MODE (size), sub_optab, size,
3378 GEN_INT (used), NULL_RTX, 0,
3382 /* Get the address of the stack space.
3383 In this case, we do not deal with EXTRA separately.
3384 A single stack adjust will do. */
3387 temp = push_block (size, extra, where_pad == downward);
3390 else if (GET_CODE (args_so_far) == CONST_INT)
3391 temp = memory_address (BLKmode,
3392 plus_constant (args_addr,
3393 skip + INTVAL (args_so_far)));
3395 temp = memory_address (BLKmode,
3396 plus_constant (gen_rtx_PLUS (Pmode,
3400 if (current_function_check_memory_usage && ! in_check_memory_usage)
3402 in_check_memory_usage = 1;
3403 target = copy_to_reg (temp);
3404 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3405 emit_library_call (chkr_copy_bitmap_libfunc,
3406 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3408 XEXP (xinner, 0), Pmode,
3409 size, TYPE_MODE (sizetype));
3411 emit_library_call (chkr_set_right_libfunc,
3412 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3414 size, TYPE_MODE (sizetype),
3415 GEN_INT (MEMORY_USE_RW),
3416 TYPE_MODE (integer_type_node));
3417 in_check_memory_usage = 0;
3420 target = gen_rtx_MEM (BLKmode, temp);
3424 set_mem_attributes (target, type, 1);
3425 /* Function incoming arguments may overlap with sibling call
3426 outgoing arguments and we cannot allow reordering of reads
3427 from function arguments with stores to outgoing arguments
3428 of sibling calls. */
3429 set_mem_alias_set (target, 0);
3432 set_mem_align (target, align);
3434 /* TEMP is the address of the block. Copy the data there. */
3435 if (GET_CODE (size) == CONST_INT
3436 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3438 move_by_pieces (target, xinner, INTVAL (size), align);
3443 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3444 enum machine_mode mode;
3446 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3448 mode = GET_MODE_WIDER_MODE (mode))
3450 enum insn_code code = movstr_optab[(int) mode];
3451 insn_operand_predicate_fn pred;
3453 if (code != CODE_FOR_nothing
3454 && ((GET_CODE (size) == CONST_INT
3455 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3456 <= (GET_MODE_MASK (mode) >> 1)))
3457 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3458 && (!(pred = insn_data[(int) code].operand[0].predicate)
3459 || ((*pred) (target, BLKmode)))
3460 && (!(pred = insn_data[(int) code].operand[1].predicate)
3461 || ((*pred) (xinner, BLKmode)))
3462 && (!(pred = insn_data[(int) code].operand[3].predicate)
3463 || ((*pred) (opalign, VOIDmode))))
3465 rtx op2 = convert_to_mode (mode, size, 1);
3466 rtx last = get_last_insn ();
3469 pred = insn_data[(int) code].operand[2].predicate;
3470 if (pred != 0 && ! (*pred) (op2, mode))
3471 op2 = copy_to_mode_reg (mode, op2);
3473 pat = GEN_FCN ((int) code) (target, xinner,
3481 delete_insns_since (last);
3486 if (!ACCUMULATE_OUTGOING_ARGS)
3488 /* If the source is referenced relative to the stack pointer,
3489 copy it to another register to stabilize it. We do not need
3490 to do this if we know that we won't be changing sp. */
3492 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3493 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3494 temp = copy_to_reg (temp);
3497 /* Make inhibit_defer_pop nonzero around the library call
3498 to force it to pop the bcopy-arguments right away. */
3500 #ifdef TARGET_MEM_FUNCTIONS
3501 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3502 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3503 convert_to_mode (TYPE_MODE (sizetype),
3504 size, TREE_UNSIGNED (sizetype)),
3505 TYPE_MODE (sizetype));
3507 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3508 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3509 convert_to_mode (TYPE_MODE (integer_type_node),
3511 TREE_UNSIGNED (integer_type_node)),
3512 TYPE_MODE (integer_type_node));
3517 else if (partial > 0)
3519 /* Scalar partly in registers. */
3521 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3524 /* # words of start of argument
3525 that we must make space for but need not store. */
3526 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3527 int args_offset = INTVAL (args_so_far);
3530 /* Push padding now if padding above and stack grows down,
3531 or if padding below and stack grows up.
3532 But if space already allocated, this has already been done. */
3533 if (extra && args_addr == 0
3534 && where_pad != none && where_pad != stack_direction)
3535 anti_adjust_stack (GEN_INT (extra));
3537 /* If we make space by pushing it, we might as well push
3538 the real data. Otherwise, we can leave OFFSET nonzero
3539 and leave the space uninitialized. */
3543 /* Now NOT_STACK gets the number of words that we don't need to
3544 allocate on the stack. */
3545 not_stack = partial - offset;
3547 /* If the partial register-part of the arg counts in its stack size,
3548 skip the part of stack space corresponding to the registers.
3549 Otherwise, start copying to the beginning of the stack space,
3550 by setting SKIP to 0. */
3551 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3553 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3554 x = validize_mem (force_const_mem (mode, x));
3556 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3557 SUBREGs of such registers are not allowed. */
3558 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3559 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3560 x = copy_to_reg (x);
3562 /* Loop over all the words allocated on the stack for this arg. */
3563 /* We can do it by words, because any scalar bigger than a word
3564 has a size a multiple of a word. */
3565 #ifndef PUSH_ARGS_REVERSED
3566 for (i = not_stack; i < size; i++)
3568 for (i = size - 1; i >= not_stack; i--)
3570 if (i >= not_stack + offset)
3571 emit_push_insn (operand_subword_force (x, i, mode),
3572 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3574 GEN_INT (args_offset + ((i - not_stack + skip)
3576 reg_parm_stack_space, alignment_pad);
3581 rtx target = NULL_RTX;
3584 /* Push padding now if padding above and stack grows down,
3585 or if padding below and stack grows up.
3586 But if space already allocated, this has already been done. */
3587 if (extra && args_addr == 0
3588 && where_pad != none && where_pad != stack_direction)
3589 anti_adjust_stack (GEN_INT (extra));
3591 #ifdef PUSH_ROUNDING
3592 if (args_addr == 0 && PUSH_ARGS)
3593 emit_single_push_insn (mode, x, type);
3597 if (GET_CODE (args_so_far) == CONST_INT)
3599 = memory_address (mode,
3600 plus_constant (args_addr,
3601 INTVAL (args_so_far)));
3603 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3606 dest = gen_rtx_MEM (mode, addr);
3609 set_mem_attributes (dest, type, 1);
3610 /* Function incoming arguments may overlap with sibling call
3611 outgoing arguments and we cannot allow reordering of reads
3612 from function arguments with stores to outgoing arguments
3613 of sibling calls. */
3614 set_mem_alias_set (dest, 0);
3617 emit_move_insn (dest, x);
3621 if (current_function_check_memory_usage && ! in_check_memory_usage)
3623 in_check_memory_usage = 1;
3625 target = get_push_address (GET_MODE_SIZE (mode));
3627 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3628 emit_library_call (chkr_copy_bitmap_libfunc,
3629 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3630 Pmode, XEXP (x, 0), Pmode,
3631 GEN_INT (GET_MODE_SIZE (mode)),
3632 TYPE_MODE (sizetype));
3634 emit_library_call (chkr_set_right_libfunc,
3635 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3636 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3637 TYPE_MODE (sizetype),
3638 GEN_INT (MEMORY_USE_RW),
3639 TYPE_MODE (integer_type_node));
3640 in_check_memory_usage = 0;
3645 /* If part should go in registers, copy that part
3646 into the appropriate registers. Do this now, at the end,
3647 since mem-to-mem copies above may do function calls. */
3648 if (partial > 0 && reg != 0)
3650 /* Handle calls that pass values in multiple non-contiguous locations.
3651 The Irix 6 ABI has examples of this. */
3652 if (GET_CODE (reg) == PARALLEL)
3653 emit_group_load (reg, x, -1); /* ??? size? */
3655 move_block_to_reg (REGNO (reg), x, partial, mode);
3658 if (extra && args_addr == 0 && where_pad == stack_direction)
3659 anti_adjust_stack (GEN_INT (extra));
3661 if (alignment_pad && args_addr == 0)
3662 anti_adjust_stack (alignment_pad);
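#if 0
/* A minimal sketch of the simplest emit_push_insn case: pushing a
   word-mode scalar with no partial-register handling and no
   preallocated argument block.  The argument values are hypothetical.  */
static void
example_push (x)
     rtx x;
{
  emit_push_insn (x, word_mode, NULL_TREE, /* size */ NULL_RTX,
                  /* align */ PARM_BOUNDARY, /* partial */ 0,
                  /* reg */ NULL_RTX, /* extra */ 0,
                  /* args_addr */ NULL_RTX, /* args_so_far */ const0_rtx,
                  /* reg_parm_stack_space */ 0,
                  /* alignment_pad */ const0_rtx);
}
#endif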
3665 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3673 /* Only registers can be subtargets. */
3674 || GET_CODE (x) != REG
3675 /* If the register is readonly, it can't be set more than once. */
3676 || RTX_UNCHANGING_P (x)
3677 /* Don't use hard regs to avoid extending their life. */
3678 || REGNO (x) < FIRST_PSEUDO_REGISTER
3679 /* Avoid subtargets inside loops,
3680 since they hide some invariant expressions. */
3681 || preserve_subexpressions_p ())
3685 /* Expand an assignment that stores the value of FROM into TO.
3686 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3687 (This may contain a QUEUED rtx;
3688 if the value is constant, this rtx is a constant.)
3689 Otherwise, the returned value is NULL_RTX.
3691 SUGGEST_REG is no longer actually used.
3692 It used to mean: copy the value through a register
3693 and return that register, if that is possible.
3694 We now use WANT_VALUE to decide whether to do this. */
3697 expand_assignment (to, from, want_value, suggest_reg)
3700 int suggest_reg ATTRIBUTE_UNUSED;
3705 /* Don't crash if the lhs of the assignment was erroneous. */
3707 if (TREE_CODE (to) == ERROR_MARK)
3709 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3710 return want_value ? result : NULL_RTX;
3713 /* Assignment of a structure component needs special treatment
3714 if the structure component's rtx is not simply a MEM.
3715 Assignment of an array element at a constant index, and assignment of
3716 an array element in an unaligned packed structure field, have the same problem. */
3719 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3720 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3722 enum machine_mode mode1;
3723 HOST_WIDE_INT bitsize, bitpos;
3728 unsigned int alignment;
3731 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3732 &unsignedp, &volatilep, &alignment);
3734 /* If we are going to use store_bit_field and extract_bit_field,
3735 make sure to_rtx will be safe for multiple use. */
3737 if (mode1 == VOIDmode && want_value)
3738 tem = stabilize_reference (tem);
3740 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3743 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3745 if (GET_CODE (to_rtx) != MEM)
3748 if (GET_MODE (offset_rtx) != ptr_mode)
3749 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3751 #ifdef POINTERS_EXTEND_UNSIGNED
3752 if (GET_MODE (offset_rtx) != Pmode)
3753 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3756 /* A constant address in TO_RTX can have VOIDmode; we must not try
3757 to call force_reg for that case, so avoid it. */
3758 if (GET_CODE (to_rtx) == MEM
3759 && GET_MODE (to_rtx) == BLKmode
3760 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3762 && (bitpos % bitsize) == 0
3763 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3764 && alignment == GET_MODE_ALIGNMENT (mode1))
3767 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3769 if (GET_CODE (XEXP (temp, 0)) == REG)
3772 to_rtx = (replace_equiv_address
3773 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3778 to_rtx = offset_address (to_rtx, offset_rtx,
3779 highest_pow2_factor (offset));
3784 if (GET_CODE (to_rtx) == MEM)
3786 /* When the offset is zero, to_rtx is the address of the
3787 structure we are storing into, and hence may be shared.
3788 We must make a new MEM before setting the volatile bit. */
3790 to_rtx = copy_rtx (to_rtx);
3792 MEM_VOLATILE_P (to_rtx) = 1;
3794 #if 0 /* This was turned off because, when a field is volatile
3795 in an object which is not volatile, the object may be in a register,
3796 and then we would abort over here. */
3802 if (TREE_CODE (to) == COMPONENT_REF
3803 && TREE_READONLY (TREE_OPERAND (to, 1)))
3806 to_rtx = copy_rtx (to_rtx);
3808 RTX_UNCHANGING_P (to_rtx) = 1;
3811 /* Check the access. */
3812 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3817 enum machine_mode best_mode;
3819 best_mode = get_best_mode (bitsize, bitpos,
3820 TYPE_ALIGN (TREE_TYPE (tem)),
3822 if (best_mode == VOIDmode)
3825 best_mode_size = GET_MODE_BITSIZE (best_mode);
3826 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3827 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3828 size *= GET_MODE_SIZE (best_mode);
3830 /* Check the access right of the pointer. */
3831 in_check_memory_usage = 1;
3833 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3834 VOIDmode, 3, to_addr, Pmode,
3835 GEN_INT (size), TYPE_MODE (sizetype),
3836 GEN_INT (MEMORY_USE_WO),
3837 TYPE_MODE (integer_type_node));
3838 in_check_memory_usage = 0;
3841 /* If this is a varying-length object, we must get the address of
3842 the source and do an explicit block move. */
3845 unsigned int from_align;
3846 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3848 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3850 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3858 if (! can_address_p (to))
3860 to_rtx = copy_rtx (to_rtx);
3861 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3864 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 /* Spurious cast for HPUX compiler. */
3867 ? ((enum machine_mode)
3868 TYPE_MODE (TREE_TYPE (to)))
3870 unsignedp, int_size_in_bytes (TREE_TYPE (tem)),
3871 get_alias_set (to));
3873 preserve_temp_slots (result);
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3887 /* If the rhs is a function call and its value is not an aggregate,
3888 call the function before we start to compute the lhs.
3889 This is needed for correct code for cases such as
3890 val = setjmp (buf) on machines where reference to val
3891 requires loading up part of an address in a separate insn.
3893 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3894 since it might be a promoted variable where the zero- or sign- extension
3895 needs to be done. Handling this in the normal way is safe because no
3896 computation is done before the call. */
3897 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3898 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3899 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3900 && GET_CODE (DECL_RTL (to)) == REG))
3905 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3909 /* Handle calls that return values in multiple non-contiguous locations.
3910 The Irix 6 ABI has examples of this. */
3911 if (GET_CODE (to_rtx) == PARALLEL)
3912 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from));
3917 #ifdef POINTERS_EXTEND_UNSIGNED
3918 if (POINTER_TYPE_P (TREE_TYPE (to))
3919 && GET_MODE (to_rtx) != GET_MODE (value))
3920 value = convert_memory_address (GET_MODE (to_rtx), value);
3922 emit_move_insn (to_rtx, value);
3924 preserve_temp_slots (to_rtx);
3927 return want_value ? to_rtx : NULL_RTX;
3930 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3931 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3934 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3936 /* Don't move directly into a return register. */
3937 if (TREE_CODE (to) == RESULT_DECL
3938 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3943 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3945 if (GET_CODE (to_rtx) == PARALLEL)
3946 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3948 emit_move_insn (to_rtx, temp);
3950 preserve_temp_slots (to_rtx);
3953 return want_value ? to_rtx : NULL_RTX;
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3966 size = expr_size (from);
3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3968 EXPAND_MEMORY_USE_DONT);
3970 /* Copy the rights of the bitmap. */
3971 if (current_function_check_memory_usage)
3972 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3973 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3974 XEXP (from_rtx, 0), Pmode,
3975 convert_to_mode (TYPE_MODE (sizetype),
3976 size, TREE_UNSIGNED (sizetype)),
3977 TYPE_MODE (sizetype));
3979 #ifdef TARGET_MEM_FUNCTIONS
3980 emit_library_call (memmove_libfunc, LCT_NORMAL,
3981 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3982 XEXP (from_rtx, 0), Pmode,
3983 convert_to_mode (TYPE_MODE (sizetype),
3984 size, TREE_UNSIGNED (sizetype)),
3985 TYPE_MODE (sizetype));
3987 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3988 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3989 XEXP (to_rtx, 0), Pmode,
3990 convert_to_mode (TYPE_MODE (integer_type_node),
3991 size, TREE_UNSIGNED (integer_type_node)),
3992 TYPE_MODE (integer_type_node));
3995 preserve_temp_slots (to_rtx);
3998 return want_value ? to_rtx : NULL_RTX;
4001 /* Compute FROM and store the value in the rtx we got. */
4004 result = store_expr (from, to_rtx, want_value);
4005 preserve_temp_slots (result);
4008 return want_value ? result : NULL_RTX;
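#if 0
/* A minimal sketch of how a front end uses expand_assignment for an
   expression statement such as `lhs = rhs;': the value is not needed,
   so WANT_VALUE is 0 and the return value is ignored.  LHS and RHS are
   hypothetical trees of compatible types.  */
static void
example_expand_assignment (lhs, rhs)
     tree lhs, rhs;
{
  expand_assignment (lhs, rhs, /* want_value */ 0, /* suggest_reg */ 0);
}
#endif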
4011 /* Generate code for computing expression EXP,
4012 and storing the value into TARGET.
4013 TARGET may contain a QUEUED rtx.
4015 If WANT_VALUE is nonzero, return a copy of the value
4016 not in TARGET, so that we can be sure to use the proper
4017 value in a containing expression even if TARGET has something
4018 else stored in it. If possible, we copy the value through a pseudo
4019 and return that pseudo. Or, if the value is constant, we try to
4020 return the constant. In some cases, we return a pseudo
4021 copied *from* TARGET.
4023 If the mode is BLKmode then we may return TARGET itself.
4024 It turns out that in BLKmode it doesn't cause a problem,
4025 because C has no operators that could combine two different
4026 assignments into the same BLKmode object with different values
4027 with no sequence point. Will other languages need this to be fixed?
4030 If WANT_VALUE is 0, we return NULL, to make sure
4031 to catch quickly any cases where the caller uses the value
4032 and fails to set WANT_VALUE. */
4035 store_expr (exp, target, want_value)
4041 int dont_return_target = 0;
4042 int dont_store_target = 0;
4044 if (TREE_CODE (exp) == COMPOUND_EXPR)
4046 /* Perform first part of compound expression, then assign from second part. */
4048 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4050 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4052 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4054 /* For conditional expression, get safe form of the target. Then
4055 test the condition, doing the appropriate assignment on either
4056 side. This avoids the creation of unnecessary temporaries.
4057 For non-BLKmode, it is more efficient not to do this. */
4059 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4062 target = protect_from_queue (target, 1);
4064 do_pending_stack_adjust ();
4066 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4067 start_cleanup_deferral ();
4068 store_expr (TREE_OPERAND (exp, 1), target, 0);
4069 end_cleanup_deferral ();
4071 emit_jump_insn (gen_jump (lab2));
4074 start_cleanup_deferral ();
4075 store_expr (TREE_OPERAND (exp, 2), target, 0);
4076 end_cleanup_deferral ();
4081 return want_value ? target : NULL_RTX;
4083 else if (queued_subexp_p (target))
4084 /* If target contains a postincrement, let's not risk
4085 using it as the place to generate the rhs. */
4087 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4089 /* Expand EXP into a new pseudo. */
4090 temp = gen_reg_rtx (GET_MODE (target));
4091 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4094 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4096 /* If target is volatile, ANSI requires accessing the value
4097 *from* the target, if it is accessed. So make that happen.
4098 In no case return the target itself. */
4099 if (! MEM_VOLATILE_P (target) && want_value)
4100 dont_return_target = 1;
4102 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4103 && GET_MODE (target) != BLKmode)
4104 /* If target is in memory and caller wants value in a register instead,
4105 arrange that. Pass TARGET as target for expand_expr so that,
4106 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4107 We know expand_expr will not use the target in that case.
4108 Don't do this if TARGET is volatile because we are supposed
4109 to write it and then read it. */
4111 temp = expand_expr (exp, target, GET_MODE (target), 0);
4112 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4114 /* If TEMP is already in the desired TARGET, only copy it from
4115 memory and don't store it there again. */
4117 || (rtx_equal_p (temp, target)
4118 && ! side_effects_p (temp) && ! side_effects_p (target)))
4119 dont_store_target = 1;
4120 temp = copy_to_reg (temp);
4122 dont_return_target = 1;
4124 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4125 /* If this is a scalar in a register that is stored in a wider mode
4126 than the declared mode, compute the result into its declared mode
4127 and then convert to the wider mode. Our value is the computed expression. */
4130 /* If we don't want a value, we can do the conversion inside EXP,
4131 which will often result in some optimizations. Do the conversion
4132 in two steps: first change the signedness, if needed, then
4133 the extension. But don't do this if the type of EXP is a subtype
4134 of something else since then the conversion might involve
4135 more than just converting modes. */
4136 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4137 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4139 if (TREE_UNSIGNED (TREE_TYPE (exp))
4140 != SUBREG_PROMOTED_UNSIGNED_P (target))
4143 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4147 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4148 SUBREG_PROMOTED_UNSIGNED_P (target)),
4152 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4154 /* If TEMP is a volatile MEM and we want a result value, make
4155 the access now so it gets done only once. Likewise if
4156 it contains TARGET. */
4157 if (GET_CODE (temp) == MEM && want_value
4158 && (MEM_VOLATILE_P (temp)
4159 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4160 temp = copy_to_reg (temp);
4162 /* If TEMP is a VOIDmode constant, use convert_modes to make
4163 sure that we properly convert it. */
4164 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4166 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4167 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4168 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4169 GET_MODE (target), temp,
4170 SUBREG_PROMOTED_UNSIGNED_P (target));
4173 convert_move (SUBREG_REG (target), temp,
4174 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 /* If we promoted a constant, change the mode back down to match
4177 target. Otherwise, the caller might get confused by a result whose
4178 mode is larger than expected. */
4180 if (want_value && GET_MODE (temp) != GET_MODE (target)
4181 && GET_MODE (temp) != VOIDmode)
4183 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4184 SUBREG_PROMOTED_VAR_P (temp) = 1;
4185 SUBREG_PROMOTED_UNSIGNED_P (temp)
4186 = SUBREG_PROMOTED_UNSIGNED_P (target);
4189 return want_value ? temp : NULL_RTX;
4193 temp = expand_expr (exp, target, GET_MODE (target), 0);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || want_value))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 if (current_function_check_memory_usage
4220 && GET_CODE (target) == MEM
4221 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4223 in_check_memory_usage = 1;
4224 if (GET_CODE (temp) == MEM)
4225 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4226 VOIDmode, 3, XEXP (target, 0), Pmode,
4227 XEXP (temp, 0), Pmode,
4228 expr_size (exp), TYPE_MODE (sizetype));
4230 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4231 VOIDmode, 3, XEXP (target, 0), Pmode,
4232 expr_size (exp), TYPE_MODE (sizetype),
4233 GEN_INT (MEMORY_USE_WO),
4234 TYPE_MODE (integer_type_node));
4235 in_check_memory_usage = 0;
4238 /* If value was not generated in the target, store it there.
4239 Convert the value to TARGET's type first if necessary. */
4240 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4241 one or both of them are volatile memory refs, we have to distinguish
4243 - expand_expr has used TARGET. In this case, we must not generate
4244 another copy. This can be detected by TARGET being equal according to ==.
4246 - expand_expr has not used TARGET - that means that the source just
4247 happens to have the same RTX form. Since temp will have been created
4248 by expand_expr, it will compare unequal according to == .
4249 We must generate a copy in this case, to reach the correct number
4250 of volatile memory references. */
4252 if ((! rtx_equal_p (temp, target)
4253 || (temp != target && (side_effects_p (temp)
4254 || side_effects_p (target))))
4255 && TREE_CODE (exp) != ERROR_MARK
4256 && ! dont_store_target)
4258 target = protect_from_queue (target, 1);
4259 if (GET_MODE (temp) != GET_MODE (target)
4260 && GET_MODE (temp) != VOIDmode)
4262 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4263 if (dont_return_target)
4265 /* In this case, we will return TEMP,
4266 so make sure it has the proper mode.
4267 But don't forget to store the value into TARGET. */
4268 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4269 emit_move_insn (target, temp);
4272 convert_move (target, temp, unsignedp);
4275 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4277 /* Handle copying a string constant into an array.
4278 The string constant may be shorter than the array.
4279 So copy just the string's actual length, and clear the rest. */
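/* For example (illustrative only): for char buf[8] = "abc"; the
   string occupies TREE_STRING_LENGTH == 4 bytes (including the
   terminating null), so 4 bytes are block-copied and the remaining
   4 bytes of the array are cleared below. */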
4283 /* Get the size of the data type of the string,
4284 which is actually the size of the target. */
4285 size = expr_size (exp);
4286 if (GET_CODE (size) == CONST_INT
4287 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4288 emit_block_move (target, temp, size);
4289 else
4291 /* Compute the size of the data to copy from the string. */
4292 tree copy_size
4293 = size_binop (MIN_EXPR,
4294 make_tree (sizetype, size),
4295 size_int (TREE_STRING_LENGTH (exp)));
4296 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4297 VOIDmode, 0);
4300 /* Copy that much. */
4301 emit_block_move (target, temp, copy_size_rtx);
4303 /* Figure out how much is left in TARGET that we have to clear.
4304 Do all calculations in ptr_mode. */
4306 addr = XEXP (target, 0);
4307 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4309 if (GET_CODE (copy_size_rtx) == CONST_INT)
4311 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4312 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4314 else
4316 addr = force_reg (ptr_mode, addr);
4317 addr = expand_binop (ptr_mode, add_optab, addr,
4318 copy_size_rtx, NULL_RTX, 0,
4319 OPTAB_LIB_WIDEN);
4321 size = expand_binop (ptr_mode, sub_optab, size,
4322 copy_size_rtx, NULL_RTX, 0,
4323 OPTAB_LIB_WIDEN);
4325 label = gen_label_rtx ();
4326 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4327 GET_MODE (size), 0, 0, label);
4330 if (size != const0_rtx)
4332 rtx dest = gen_rtx_MEM (BLKmode, addr);
4334 MEM_COPY_ATTRIBUTES (dest, target);
4336 /* Be sure we can write on ADDR. */
4337 in_check_memory_usage = 1;
4338 if (current_function_check_memory_usage)
4339 emit_library_call (chkr_check_addr_libfunc,
4340 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4342 size, TYPE_MODE (sizetype),
4343 GEN_INT (MEMORY_USE_WO),
4344 TYPE_MODE (integer_type_node));
4345 in_check_memory_usage = 0;
4346 clear_storage (dest, size);
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
4356 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4357 else if (GET_MODE (temp) == BLKmode)
4358 emit_block_move (target, temp, expr_size (exp));
4360 emit_move_insn (target, temp);
4363 /* If we don't want a value, return NULL_RTX. */
4364 if (! want_value)
4365 return NULL_RTX;
4367 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4368 ??? The latter test doesn't seem to make sense. */
4369 else if (dont_return_target && GET_CODE (temp) != MEM)
4370 return temp;
4372 /* Return TARGET itself if it is a hard register. */
4373 else if (want_value && GET_MODE (target) != BLKmode
4374 && ! (GET_CODE (target) == REG
4375 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4376 return copy_to_reg (target);
4378 else
4379 return target;
4382 /* Return 1 if EXP just contains zeros. */
4390 switch (TREE_CODE (exp))
4392 case CONVERT_EXPR:
4393 case NOP_EXPR:
4394 case NON_LVALUE_EXPR:
4395 return is_zeros_p (TREE_OPERAND (exp, 0));
4397 case INTEGER_CST:
4398 return integer_zerop (exp);
4400 case COMPLEX_CST:
4401 return
4402 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4404 case REAL_CST:
4405 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4407 case CONSTRUCTOR:
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4409 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4410 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4411 if (! is_zeros_p (TREE_VALUE (elt)))
4412 return 0;
4414 return 1;
4416 default:
4417 return 0;
4421 /* Return 1 if EXP contains mostly (3/4) zeros. */
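/* As a worked example, a constructor such as { 5, 0, 0, 0 } gives
   elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4 holds, so it is
   considered mostly zero; { 5, 5, 0, 0 } fails the same test. */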
4423 static int
4424 mostly_zeros_p (exp)
4425 tree exp;
4427 if (TREE_CODE (exp) == CONSTRUCTOR)
4429 int elts = 0, zeros = 0;
4430 tree elt = CONSTRUCTOR_ELTS (exp);
4431 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4433 /* If there are no ranges of true bits, it is all zero. */
4434 return elt == NULL_TREE;
4436 for (; elt; elt = TREE_CHAIN (elt))
4438 /* We do not handle the case where the index is a RANGE_EXPR,
4439 so the statistic will be somewhat inaccurate.
4440 We do make a more accurate count in store_constructor itself,
4441 and since this function is only used for nested array elements,
4442 this should be close enough. */
4443 if (mostly_zeros_p (TREE_VALUE (elt)))
4444 zeros++;
4445 elts++;
4448 return 4 * zeros >= 3 * elts;
4451 return is_zeros_p (exp);
4454 /* Helper function for store_constructor.
4455 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4456 TYPE is the type of the CONSTRUCTOR, not the element type.
4457 CLEARED is as for store_constructor.
4458 ALIAS_SET is the alias set to use for any stores.
4460 This provides a recursive shortcut back to store_constructor when it isn't
4461 necessary to go through store_field. This is so that we can pass through
4462 the cleared field to let store_constructor know that we may not have to
4463 clear a substructure if the outer structure has already been cleared. */
4465 static void
4466 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4467 alias_set)
4468 rtx target;
4469 unsigned HOST_WIDE_INT bitsize;
4470 HOST_WIDE_INT bitpos;
4471 enum machine_mode mode;
4472 tree exp, type;
4473 int cleared;
4474 int alias_set;
4476 if (TREE_CODE (exp) == CONSTRUCTOR
4477 && bitpos % BITS_PER_UNIT == 0
4478 /* If we have a non-zero bitpos for a register target, then we just
4479 let store_field do the bitfield handling. This is unlikely to
4480 generate unnecessary clear instructions anyway. */
4481 && (bitpos == 0 || GET_CODE (target) == MEM))
4483 if (GET_CODE (target) == MEM)
4484 target
4485 = adjust_address (target,
4486 GET_MODE (target) == BLKmode
4487 || 0 != (bitpos
4488 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4489 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4492 /* Update the alias set, if required. */
4493 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4494 && MEM_ALIAS_SET (target) != 0)
4496 target = copy_rtx (target);
4497 set_mem_alias_set (target, alias_set);
4500 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4502 else
4503 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4504 int_size_in_bytes (type), alias_set);
4507 /* Store the value of constructor EXP into the rtx TARGET.
4508 TARGET is either a REG or a MEM; we know it cannot conflict, since
4509 safe_from_p has been called.
4510 CLEARED is true if TARGET is known to have been zeroed.
4511 SIZE is the number of bytes of TARGET we are allowed to modify: this
4512 may not be the same as the size of EXP if we are assigning to a field
4513 which has been packed to exclude padding bits. */
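/* Sketch of the effect (hypothetical example): for
   struct { int a, b; } x = { 0, 1 }; mostly_zeros_p is false and each
   field is stored individually, whereas an initializer with fewer
   elements than fields first gets a single clear_storage of the whole
   TARGET. */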
4515 static void
4516 store_constructor (exp, target, cleared, size)
4517 tree exp;
4518 rtx target;
4519 int cleared;
4520 HOST_WIDE_INT size;
4522 tree type = TREE_TYPE (exp);
4523 #ifdef WORD_REGISTER_OPERATIONS
4524 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4525 #endif
4527 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4528 || TREE_CODE (type) == QUAL_UNION_TYPE)
4532 /* We either clear the aggregate or indicate the value is dead. */
4533 if ((TREE_CODE (type) == UNION_TYPE
4534 || TREE_CODE (type) == QUAL_UNION_TYPE)
4535 && ! cleared
4536 && ! CONSTRUCTOR_ELTS (exp))
4537 /* If the constructor is empty, clear the union. */
4539 clear_storage (target, expr_size (exp));
4540 cleared = 1;
4543 /* If we are building a static constructor into a register,
4544 set the initial value as zero so we can fold the value into
4545 a constant. But if more than one register is involved,
4546 this probably loses. */
4547 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4548 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4551 cleared = 1;
4554 /* If the constructor has fewer fields than the structure
4555 or if we are initializing the structure to mostly zeros,
4556 clear the whole structure first. Don't do this if TARGET is a
4557 register whose mode size isn't equal to SIZE since clear_storage
4558 can't handle this case. */
4559 else if (! cleared && size > 0
4560 && ((list_length (CONSTRUCTOR_ELTS (exp))
4561 != fields_length (type))
4562 || mostly_zeros_p (exp))
4563 && (GET_CODE (target) != REG
4564 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4565 == size)))
4567 clear_storage (target, GEN_INT (size));
4568 cleared = 1;
4572 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4574 /* Store each element of the constructor into
4575 the corresponding field of TARGET. */
4577 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4579 tree field = TREE_PURPOSE (elt);
4580 #ifdef WORD_REGISTER_OPERATIONS
4581 tree value = TREE_VALUE (elt);
4582 #endif
4583 enum machine_mode mode;
4584 HOST_WIDE_INT bitsize;
4585 HOST_WIDE_INT bitpos = 0;
4588 rtx to_rtx = target;
4590 /* Just ignore missing fields.
4591 We cleared the whole structure, above,
4592 if any fields are missing. */
4593 if (field == 0)
4594 continue;
4596 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4597 continue;
4599 if (host_integerp (DECL_SIZE (field), 1))
4600 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4601 else
4602 bitsize = -1;
4604 unsignedp = TREE_UNSIGNED (field);
4605 mode = DECL_MODE (field);
4606 if (DECL_BIT_FIELD (field))
4607 mode = VOIDmode;
4609 offset = DECL_FIELD_OFFSET (field);
4610 if (host_integerp (offset, 0)
4611 && host_integerp (bit_position (field), 0))
4613 bitpos = int_bit_position (field);
4614 offset = 0;
4616 else
4617 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4623 if (contains_placeholder_p (offset))
4624 offset = build (WITH_RECORD_EXPR, sizetype,
4625 offset, make_tree (TREE_TYPE (exp), target));
4627 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4628 if (GET_CODE (to_rtx) != MEM)
4629 abort ();
4631 if (GET_MODE (offset_rtx) != ptr_mode)
4632 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4634 #ifdef POINTERS_EXTEND_UNSIGNED
4635 if (GET_MODE (offset_rtx) != Pmode)
4636 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4637 #endif
4639 to_rtx = offset_address (to_rtx, offset_rtx,
4640 highest_pow2_factor (offset));
4643 if (TREE_READONLY (field))
4645 if (GET_CODE (to_rtx) == MEM)
4646 to_rtx = copy_rtx (to_rtx);
4648 RTX_UNCHANGING_P (to_rtx) = 1;
4651 #ifdef WORD_REGISTER_OPERATIONS
4652 /* If this initializes a field that is smaller than a word, at the
4653 start of a word, try to widen it to a full word.
4654 This special case allows us to output C++ member function
4655 initializations in a form that the optimizers can understand. */
4656 if (GET_CODE (target) == REG
4657 && bitsize < BITS_PER_WORD
4658 && bitpos % BITS_PER_WORD == 0
4659 && GET_MODE_CLASS (mode) == MODE_INT
4660 && TREE_CODE (value) == INTEGER_CST
4662 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4664 tree type = TREE_TYPE (value);
4666 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4668 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4669 value = convert (type, value);
4672 if (BYTES_BIG_ENDIAN)
4673 value
4674 = fold (build (LSHIFT_EXPR, type, value,
4675 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4676 bitsize = BITS_PER_WORD;
4677 mode = word_mode;
4681 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4682 && DECL_NONADDRESSABLE_P (field))
4684 to_rtx = copy_rtx (to_rtx);
4685 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4688 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4689 TREE_VALUE (elt), type, cleared,
4690 get_alias_set (TREE_TYPE (field)));
4693 else if (TREE_CODE (type) == ARRAY_TYPE)
4698 tree domain = TYPE_DOMAIN (type);
4699 tree elttype = TREE_TYPE (type);
4700 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4701 && TYPE_MAX_VALUE (domain)
4702 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4703 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4704 HOST_WIDE_INT minelt = 0;
4705 HOST_WIDE_INT maxelt = 0;
4707 /* If we have constant bounds for the range of the type, get them. */
4708 if (const_bounds_p)
4710 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4711 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4714 /* If the constructor has fewer elements than the array,
4715 clear the whole array first. Similarly if this is
4716 a static constructor of a non-BLKmode object. */
4717 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4718 need_to_clear = 1;
4719 else
4721 HOST_WIDE_INT count = 0, zero_count = 0;
4722 need_to_clear = ! const_bounds_p;
4724 /* This loop is a more accurate version of the loop in
4725 mostly_zeros_p (it handles RANGE_EXPR in an index).
4726 It is also needed to check for missing elements. */
4727 for (elt = CONSTRUCTOR_ELTS (exp);
4728 elt != NULL_TREE && ! need_to_clear;
4729 elt = TREE_CHAIN (elt))
4731 tree index = TREE_PURPOSE (elt);
4732 HOST_WIDE_INT this_node_count;
4734 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4736 tree lo_index = TREE_OPERAND (index, 0);
4737 tree hi_index = TREE_OPERAND (index, 1);
4739 if (! host_integerp (lo_index, 1)
4740 || ! host_integerp (hi_index, 1))
4746 this_node_count = (tree_low_cst (hi_index, 1)
4747 - tree_low_cst (lo_index, 1) + 1);
4750 this_node_count = 1;
4752 count += this_node_count;
4753 if (mostly_zeros_p (TREE_VALUE (elt)))
4754 zero_count += this_node_count;
4757 /* Clear the entire array first if there are any missing elements,
4758 or if the incidence of zero elements is >= 75%. */
4759 if (! need_to_clear
4760 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4761 need_to_clear = 1;
4764 if (need_to_clear && size > 0)
4766 if (! cleared)
4767 clear_storage (target, GEN_INT (size));
4768 cleared = 1;
4770 else if (REG_P (target))
4771 /* Inform later passes that the old value is dead. */
4772 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4774 /* Store each element of the constructor into
4775 the corresponding element of TARGET, determined
4776 by counting the elements. */
4777 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4778 elt;
4779 elt = TREE_CHAIN (elt), i++)
4781 enum machine_mode mode;
4782 HOST_WIDE_INT bitsize;
4783 HOST_WIDE_INT bitpos;
4785 tree value = TREE_VALUE (elt);
4786 tree index = TREE_PURPOSE (elt);
4787 rtx xtarget = target;
4789 if (cleared && is_zeros_p (value))
4790 continue;
4792 unsignedp = TREE_UNSIGNED (elttype);
4793 mode = TYPE_MODE (elttype);
4794 if (mode == BLKmode)
4795 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4796 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4797 : -1);
4798 else
4799 bitsize = GET_MODE_BITSIZE (mode);
4801 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4803 tree lo_index = TREE_OPERAND (index, 0);
4804 tree hi_index = TREE_OPERAND (index, 1);
4805 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4806 struct nesting *loop;
4807 HOST_WIDE_INT lo, hi, count;
4810 /* If the range is constant and "small", unroll the loop. */
4811 if (const_bounds_p
4812 && host_integerp (lo_index, 0)
4813 && host_integerp (hi_index, 0)
4814 && (lo = tree_low_cst (lo_index, 0),
4815 hi = tree_low_cst (hi_index, 0),
4816 count = hi - lo + 1,
4817 (GET_CODE (target) != MEM
4818 || count <= 2
4819 || (host_integerp (TYPE_SIZE (elttype), 1)
4820 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4821 <= 40 * 8)))))
4823 lo -= minelt; hi -= minelt;
4824 for (; lo <= hi; lo++)
4826 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4828 if (GET_CODE (target) == MEM
4829 && !MEM_KEEP_ALIAS_SET_P (target)
4830 && TYPE_NONALIASED_COMPONENT (type))
4832 target = copy_rtx (target);
4833 MEM_KEEP_ALIAS_SET_P (target) = 1;
4836 store_constructor_field
4837 (target, bitsize, bitpos, mode, value, type, cleared,
4838 get_alias_set (elttype));
4843 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4844 loop_top = gen_label_rtx ();
4845 loop_end = gen_label_rtx ();
4847 unsignedp = TREE_UNSIGNED (domain);
4849 index = build_decl (VAR_DECL, NULL_TREE, domain);
4851 index_r
4852 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4853 &unsignedp, 0));
4854 SET_DECL_RTL (index, index_r);
4855 if (TREE_CODE (value) == SAVE_EXPR
4856 && SAVE_EXPR_RTL (value) == 0)
4858 /* Make sure value gets expanded once before the
4859 loop. */
4860 expand_expr (value, const0_rtx, VOIDmode, 0);
4863 store_expr (lo_index, index_r, 0);
4864 loop = expand_start_loop (0);
4866 /* Assign value to element index. */
4867 position
4868 = convert (ssizetype,
4869 fold (build (MINUS_EXPR, TREE_TYPE (index),
4870 index, TYPE_MIN_VALUE (domain))));
4871 position = size_binop (MULT_EXPR, position,
4872 convert (ssizetype,
4873 TYPE_SIZE_UNIT (elttype)));
4875 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4876 xtarget = offset_address (target, pos_rtx,
4877 highest_pow2_factor (position));
4878 xtarget = adjust_address (xtarget, mode, 0);
4879 if (TREE_CODE (value) == CONSTRUCTOR)
4880 store_constructor (value, xtarget, cleared,
4881 bitsize / BITS_PER_UNIT);
4883 store_expr (value, xtarget, 0);
4885 expand_exit_loop_if_false (loop,
4886 build (LT_EXPR, integer_type_node,
4887 index, hi_index));
4889 expand_increment (build (PREINCREMENT_EXPR,
4890 TREE_TYPE (index),
4891 index, integer_one_node), 0, 0);
4892 expand_end_loop ();
4893 emit_label (loop_end);
4896 else if ((index != 0 && ! host_integerp (index, 0))
4897 || ! host_integerp (TYPE_SIZE (elttype), 1))
4901 if (index == 0)
4902 index = ssize_int (i);
4904 if (minelt)
4905 index = convert (ssizetype,
4906 fold (build (MINUS_EXPR, index,
4907 TYPE_MIN_VALUE (domain))));
4909 position = size_binop (MULT_EXPR, index,
4911 TYPE_SIZE_UNIT (elttype)));
4912 xtarget = offset_address (target,
4913 expand_expr (position, 0, VOIDmode, 0),
4914 highest_pow2_factor (position));
4915 xtarget = adjust_address (xtarget, mode, 0);
4916 store_expr (value, xtarget, 0);
4920 if (index != 0)
4921 bitpos = ((tree_low_cst (index, 0) - minelt)
4922 * tree_low_cst (TYPE_SIZE (elttype), 1));
4923 else
4924 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4926 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4927 && TYPE_NONALIASED_COMPONENT (type))
4929 target = copy_rtx (target);
4930 MEM_KEEP_ALIAS_SET_P (target) = 1;
4933 store_constructor_field (target, bitsize, bitpos, mode, value,
4934 type, cleared, get_alias_set (elttype));
4940 /* Set constructor assignments. */
4941 else if (TREE_CODE (type) == SET_TYPE)
4943 tree elt = CONSTRUCTOR_ELTS (exp);
4944 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4945 tree domain = TYPE_DOMAIN (type);
4946 tree domain_min, domain_max, bitlength;
4948 /* The default implementation strategy is to extract the constant
4949 parts of the constructor, use that to initialize the target,
4950 and then "or" in whatever non-constant ranges we need in addition.
4952 If a large set is all zero or all ones, it is
4953 probably better to set it using memset (if available) or bzero.
4954 Also, if a large set has just a single range, it may also be
4955 better to first clear the set (using
4956 bzero/memset) and then set the bits we want. */
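/* A hypothetical sketch: for a 64-bit set initialized from the
   constant range [2..5], the constant part is the word 0x3c (bits 2
   through 5), which can be stored with a plain move; any non-constant
   range would then be "or"ed in through the __setbits call further
   down. */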
4958 /* Check for all zeros. */
4959 if (elt == NULL_TREE && size > 0)
4961 if (! cleared)
4962 clear_storage (target, GEN_INT (size));
4963 return;
4966 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4967 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4968 bitlength = size_binop (PLUS_EXPR,
4969 size_diffop (domain_max, domain_min),
4970 ssize_int (1));
4972 nbits = tree_low_cst (bitlength, 1);
4974 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4975 are "complicated" (more than one range), initialize (the
4976 constant parts) by copying from a constant. */
4977 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4978 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4980 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4981 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4982 char *bit_buffer = (char *) alloca (nbits);
4983 HOST_WIDE_INT word = 0;
4984 unsigned int bit_pos = 0;
4985 unsigned int ibit = 0;
4986 unsigned int offset = 0; /* In bytes from beginning of set. */
4988 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4989 for (;;)
4991 if (bit_buffer[ibit])
4993 if (BYTES_BIG_ENDIAN)
4994 word |= (1 << (set_word_size - 1 - bit_pos));
4995 else
4996 word |= 1 << bit_pos;
4998 bit_pos++; ibit++;
5000 if (bit_pos >= set_word_size || ibit == nbits)
5002 if (word != 0 || ! cleared)
5004 rtx datum = GEN_INT (word);
5007 /* The assumption here is that it is safe to use
5008 XEXP if the set is multi-word, but not if
5009 it's single-word. */
5010 if (GET_CODE (target) == MEM)
5011 to_rtx = adjust_address (target, mode, offset);
5012 else if (offset == 0)
5013 to_rtx = target;
5014 else
5015 abort ();
5016 emit_move_insn (to_rtx, datum);
5020 if (ibit == nbits)
5021 break;
5022 word = 0; bit_pos = 0;
5023 offset += set_word_size / BITS_PER_UNIT;
5028 /* Don't bother clearing storage if the set is all ones. */
5029 if (TREE_CHAIN (elt) != NULL_TREE
5030 || (TREE_PURPOSE (elt) == NULL_TREE
5031 ? nbits != 1
5032 : ( ! host_integerp (TREE_VALUE (elt), 0)
5033 || ! host_integerp (TREE_PURPOSE (elt), 0)
5034 || (tree_low_cst (TREE_VALUE (elt), 0)
5035 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5036 != (HOST_WIDE_INT) nbits))))
5037 clear_storage (target, expr_size (exp));
5039 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5041 /* Start of range of element or NULL. */
5042 tree startbit = TREE_PURPOSE (elt);
5043 /* End of range of element, or element value. */
5044 tree endbit = TREE_VALUE (elt);
5045 #ifdef TARGET_MEM_FUNCTIONS
5046 HOST_WIDE_INT startb, endb;
5047 #endif
5048 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5050 bitlength_rtx = expand_expr (bitlength,
5051 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5053 /* Handle non-range tuple element like [ expr ]. */
5054 if (startbit == NULL_TREE)
5056 startbit = save_expr (endbit);
5057 endbit = startbit;
5060 startbit = convert (sizetype, startbit);
5061 endbit = convert (sizetype, endbit);
5062 if (! integer_zerop (domain_min))
5064 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5065 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5067 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5069 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5070 EXPAND_CONST_ADDRESS);
5073 if (GET_CODE (target) == REG)
5074 targetx
5075 = assign_temp
5076 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5077 TYPE_QUAL_CONST)),
5078 0, 1, 1);
5079 emit_move_insn (targetx, target);
5082 else if (GET_CODE (target) == MEM)
5083 targetx = target;
5084 else
5085 abort ();
5087 #ifdef TARGET_MEM_FUNCTIONS
5088 /* Optimization: If startbit and endbit are
5089 constants divisible by BITS_PER_UNIT,
5090 call memset instead. */
5091 if (TREE_CODE (startbit) == INTEGER_CST
5092 && TREE_CODE (endbit) == INTEGER_CST
5093 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5094 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5096 emit_library_call (memset_libfunc, LCT_NORMAL,
5097 VOIDmode, 3,
5098 plus_constant (XEXP (targetx, 0),
5099 startb / BITS_PER_UNIT),
5100 Pmode,
5101 constm1_rtx, TYPE_MODE (integer_type_node),
5102 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5103 TYPE_MODE (sizetype));
5105 else
5106 #endif
5107 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5108 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5109 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5110 startbit_rtx, TYPE_MODE (sizetype),
5111 endbit_rtx, TYPE_MODE (sizetype));
5113 if (GET_CODE (target) == REG)
5114 emit_move_insn (target, targetx);
5122 /* Store the value of EXP (an expression tree)
5123 into a subfield of TARGET which has mode MODE and occupies
5124 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5125 If MODE is VOIDmode, it means that we are storing into a bit-field.
5127 If VALUE_MODE is VOIDmode, return nothing in particular.
5128 UNSIGNEDP is not used in this case.
5130 Otherwise, return an rtx for the value stored. This rtx
5131 has mode VALUE_MODE if that is convenient to do.
5132 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5134 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5136 ALIAS_SET is the alias set for the destination. This value will
5137 (in general) be different from that for TARGET, since TARGET is a
5138 reference to the containing structure. */
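/* For instance (illustrative numbers only), storing into a field with
   BITSIZE == 5 and BITPOS == 11 of a structure in memory must go
   through store_bit_field below, while a byte-aligned field whose
   MODE is not VOIDmode can be written through an ordinary memory
   reference built with adjust_address at the end of this function. */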
5140 static rtx
5141 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp,
5142 total_size, alias_set)
5143 rtx target;
5144 HOST_WIDE_INT bitsize;
5145 HOST_WIDE_INT bitpos;
5146 enum machine_mode mode;
5147 tree exp;
5148 enum machine_mode value_mode;
5149 int unsignedp;
5150 HOST_WIDE_INT total_size;
5151 int alias_set;
5153 HOST_WIDE_INT width_mask = 0;
5155 if (TREE_CODE (exp) == ERROR_MARK)
5156 return const0_rtx;
5158 /* If we have nothing to store, do nothing unless the expression has
5159 side-effects. */
5160 if (bitsize == 0)
5161 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5163 if (bitsize < HOST_BITS_PER_WIDE_INT)
5164 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5166 /* If we are storing into an unaligned field of an aligned union that is
5167 in a register, we may have the mode of TARGET being an integer mode but
5168 MODE == BLKmode. In that case, get an aligned object whose size and
5169 alignment are the same as TARGET and store TARGET into it (we can avoid
5170 the store if the field being stored is the entire width of TARGET). Then
5171 call ourselves recursively to store the field into a BLKmode version of
5172 that object. Finally, load from the object into TARGET. This is not
5173 very efficient in general, but should only be slightly more expensive
5174 than the otherwise-required unaligned accesses. Perhaps this can be
5175 cleaned up later. */
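/* Hypothetical example: a union held in a DImode register whose field
   is stored with MODE == BLKmode; we copy the register to a DImode
   stack temporary, store the field into a BLKmode view of that
   temporary, and move the result back into the register. */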
5177 if (mode == BLKmode
5178 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5180 rtx object
5181 = assign_temp
5182 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5183 TYPE_QUAL_CONST),
5184 0, 1, 1);
5185 rtx blk_object = copy_rtx (object);
5187 PUT_MODE (blk_object, BLKmode);
5188 set_mem_alias_set (blk_object, 0);
5190 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5191 emit_move_insn (object, target);
5193 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5194 total_size, alias_set);
5196 /* Even though we aren't returning target, we need to
5197 give it the updated value. */
5198 emit_move_insn (target, object);
5200 return blk_object;
5203 if (GET_CODE (target) == CONCAT)
5205 /* We're storing into a struct containing a single __complex. */
5207 if (bitpos != 0)
5208 abort ();
5209 return store_expr (exp, target, 0);
5212 /* If the structure is in a register or if the component
5213 is a bit field, we cannot use addressing to access it.
5214 Use bit-field techniques or SUBREG to store in it. */
5216 if (mode == VOIDmode
5217 || (mode != BLKmode && ! direct_store[(int) mode]
5218 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5219 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5220 || GET_CODE (target) == REG
5221 || GET_CODE (target) == SUBREG
5222 /* If the field isn't aligned enough to store as an ordinary memref,
5223 store it as a bit field. */
5224 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5225 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5226 || bitpos % GET_MODE_ALIGNMENT (mode)))
5227 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5228 && (TYPE_ALIGN (TREE_TYPE (exp)) > MEM_ALIGN (target)
5229 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5230 /* If the RHS and field are a constant size and the size of the
5231 RHS isn't the same size as the bitfield, we must use bitfield
5232 operations. */
5233 || (bitsize >= 0
5234 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5235 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5237 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5239 /* If BITSIZE is narrower than the size of the type of EXP
5240 we will be narrowing TEMP. Normally, what's wanted are the
5241 low-order bits. However, if EXP's type is a record and this is
5242 big-endian machine, we want the upper BITSIZE bits. */
5243 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5244 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5245 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5246 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5247 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5248 - bitsize),
5249 temp, 1);
5251 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5252 MODE. */
5253 if (mode != VOIDmode && mode != BLKmode
5254 && mode != TYPE_MODE (TREE_TYPE (exp)))
5255 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5257 /* If the modes of TARGET and TEMP are both BLKmode, both
5258 must be in memory and BITPOS must be aligned on a byte
5259 boundary. If so, we simply do a block copy. */
5260 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5262 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5263 || bitpos % BITS_PER_UNIT != 0)
5264 abort ();
5266 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5267 emit_block_move (target, temp,
5268 bitsize == -1 ? expr_size (exp)
5269 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5270 / BITS_PER_UNIT));
5272 return value_mode == VOIDmode ? const0_rtx : target;
5275 /* Store the value in the bitfield. */
5276 store_bit_field (target, bitsize, bitpos, mode, temp, total_size);
5277 if (value_mode != VOIDmode)
5279 /* The caller wants an rtx for the value.
5280 If possible, avoid refetching from the bitfield itself. */
5281 if (width_mask != 0
5282 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5284 tree count;
5285 enum machine_mode tmode;
5287 if (unsignedp)
5288 return expand_and (temp,
5289 GEN_INT
5290 (trunc_int_for_mode
5291 (width_mask,
5292 GET_MODE (temp) == VOIDmode
5293 ? value_mode
5294 : GET_MODE (temp))), NULL_RTX);
5296 tmode = GET_MODE (temp);
5297 if (tmode == VOIDmode)
5298 tmode = value_mode;
5299 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5300 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5304 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5305 NULL_RTX, value_mode, VOIDmode,
5306 int_size_in_bytes (TREE_TYPE (exp)));
5312 rtx addr = XEXP (target, 0);
5315 /* If a value is wanted, it must be the lhs;
5316 so make the address stable for multiple use. */
5318 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5319 && ! CONSTANT_ADDRESS_P (addr)
5320 /* A frame-pointer reference is already stable. */
5321 && ! (GET_CODE (addr) == PLUS
5322 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5323 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5324 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5325 target = replace_equiv_address (target, copy_to_reg (addr));
5327 /* Now build a reference to just the desired component. */
5329 to_rtx = copy_rtx (adjust_address (target, mode,
5330 bitpos / BITS_PER_UNIT));
5332 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5333 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5335 to_rtx = copy_rtx (to_rtx);
5336 set_mem_alias_set (to_rtx, alias_set);
5339 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5343 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5344 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5345 codes and find the ultimate containing object, which we return.
5347 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5348 bit position, and *PUNSIGNEDP to the signedness of the field.
5349 If the position of the field is variable, we store a tree
5350 giving the variable offset (in units) in *POFFSET.
5351 This offset is in addition to the bit position.
5352 If the position is not variable, we store 0 in *POFFSET.
5353 We set *PALIGNMENT to the alignment of the address that will be
5354 computed. This is the alignment of the thing we return if *POFFSET
5355 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5357 If any of the extraction expressions is volatile,
5358 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5360 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5361 is a mode that can be used to access the field. In that case, *PBITSIZE
5362 is redundant.
5364 If the field describes a variable-sized object, *PMODE is set to
5365 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5366 this case, but the address of the object can be found. */
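/* Illustrative example: for a COMPONENT_REF whose field begins 4
   bytes plus 3 bits into its record, this returns the record object
   with *PBITPOS == 4 * BITS_PER_UNIT + 3 (35 on an 8-bit-unit target)
   and *POFFSET == 0, since the position is constant. */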
5368 tree
5369 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5370 punsignedp, pvolatilep, palignment)
5371 tree exp;
5372 HOST_WIDE_INT *pbitsize;
5373 HOST_WIDE_INT *pbitpos;
5374 tree *poffset;
5375 enum machine_mode *pmode;
5376 int *punsignedp;
5377 int *pvolatilep;
5378 unsigned int *palignment;
5380 tree size_tree = 0;
5381 enum machine_mode mode = VOIDmode;
5382 tree offset = size_zero_node;
5383 tree bit_offset = bitsize_zero_node;
5384 unsigned int alignment = BIGGEST_ALIGNMENT;
5385 tree placeholder_ptr = 0;
5388 /* First get the mode, signedness, and size. We do this from just the
5389 outermost expression. */
5390 if (TREE_CODE (exp) == COMPONENT_REF)
5392 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5393 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5394 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5396 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5398 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5400 size_tree = TREE_OPERAND (exp, 1);
5401 *punsignedp = TREE_UNSIGNED (exp);
5405 mode = TYPE_MODE (TREE_TYPE (exp));
5406 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5408 if (mode == BLKmode)
5409 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5411 *pbitsize = GET_MODE_BITSIZE (mode);
5416 if (! host_integerp (size_tree, 1))
5417 mode = BLKmode, *pbitsize = -1;
5418 else
5419 *pbitsize = tree_low_cst (size_tree, 1);
5422 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5423 and find the ultimate containing object. */
5426 if (TREE_CODE (exp) == BIT_FIELD_REF)
5427 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5428 else if (TREE_CODE (exp) == COMPONENT_REF)
5430 tree field = TREE_OPERAND (exp, 1);
5431 tree this_offset = DECL_FIELD_OFFSET (field);
5433 /* If this field hasn't been filled in yet, don't go
5434 past it. This should only happen when folding expressions
5435 made during type construction. */
5436 if (this_offset == 0)
5437 break;
5438 else if (! TREE_CONSTANT (this_offset)
5439 && contains_placeholder_p (this_offset))
5440 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5442 offset = size_binop (PLUS_EXPR, offset, this_offset);
5443 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5444 DECL_FIELD_BIT_OFFSET (field));
5446 if (! host_integerp (offset, 0))
5447 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5450 else if (TREE_CODE (exp) == ARRAY_REF
5451 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5453 tree index = TREE_OPERAND (exp, 1);
5454 tree array = TREE_OPERAND (exp, 0);
5455 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5456 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5457 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5459 /* We assume all arrays have sizes that are a multiple of a byte.
5460 First subtract the lower bound, if any, in the type of the
5461 index, then convert to sizetype and multiply by the size of the
5463 if (low_bound != 0 && ! integer_zerop (low_bound))
5464 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5465 index, low_bound));
5467 /* If the index has a self-referential type, pass it to a
5468 WITH_RECORD_EXPR; if the component size does, pass our
5469 component to one. */
5470 if (! TREE_CONSTANT (index)
5471 && contains_placeholder_p (index))
5472 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5473 if (! TREE_CONSTANT (unit_size)
5474 && contains_placeholder_p (unit_size))
5475 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5477 offset = size_binop (PLUS_EXPR, offset,
5478 size_binop (MULT_EXPR,
5479 convert (sizetype, index),
5483 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5485 tree new = find_placeholder (exp, &placeholder_ptr);
5487 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5488 We might have been called from tree optimization where we
5489 haven't set up an object yet. */
5490 if (new == 0)
5491 break;
5492 else
5493 exp = new;
5495 continue;
5497 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5498 && ! ((TREE_CODE (exp) == NOP_EXPR
5499 || TREE_CODE (exp) == CONVERT_EXPR)
5500 && (TYPE_MODE (TREE_TYPE (exp))
5501 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5502 break;
5504 /* If any reference in the chain is volatile, the effect is volatile. */
5505 if (TREE_THIS_VOLATILE (exp))
5506 *pvolatilep = 1;
5508 /* If the offset is non-constant already, then we can't assume any
5509 alignment more than the alignment here. */
5510 if (! TREE_CONSTANT (offset))
5511 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5513 exp = TREE_OPERAND (exp, 0);
5516 if (DECL_P (exp))
5517 alignment = MIN (alignment, DECL_ALIGN (exp));
5518 else if (TREE_TYPE (exp) != 0)
5519 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5521 /* If OFFSET is constant, see if we can return the whole thing as a
5522 constant bit position. Otherwise, split it up. */
5523 if (host_integerp (offset, 0)
5524 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5525 bitsize_unit_node))
5526 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5527 && host_integerp (tem, 0))
5528 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5529 else
5530 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5532 *pmode = mode;
5533 *palignment = alignment;
5534 return exp;
5537 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5539 static enum memory_use_mode
5540 get_memory_usage_from_modifier (modifier)
5541 enum expand_modifier modifier;
5543 switch (modifier)
5545 case EXPAND_NORMAL:
5546 case EXPAND_SUM:
5547 return MEMORY_USE_RO;
5549 case EXPAND_MEMORY_USE_WO:
5550 return MEMORY_USE_WO;
5552 case EXPAND_MEMORY_USE_RW:
5553 return MEMORY_USE_RW;
5555 case EXPAND_MEMORY_USE_DONT:
5556 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5557 MEMORY_USE_DONT, because they are modifiers to a call of
5558 expand_expr in the ADDR_EXPR case of expand_expr. */
5559 case EXPAND_CONST_ADDRESS:
5560 case EXPAND_INITIALIZER:
5561 return MEMORY_USE_DONT;
5562 case EXPAND_MEMORY_USE_BAD:
5563 default:
5564 abort ();
5568 /* Given an rtx VALUE that may contain additions and multiplications, return
5569 an equivalent value that just refers to a register, memory, or constant.
5570 This is done by generating instructions to perform the arithmetic and
5571 returning a pseudo-register containing the value.
5573 The returned value may be a REG, SUBREG, MEM or constant. */
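/* For example (hypothetical rtl), given
   (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))
   the code below emits the multiply and the add explicitly and
   returns a pseudo register holding the sum. */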
5575 rtx
5576 force_operand (value, target)
5577 rtx value, target;
5579 optab binoptab = 0;
5580 /* Use a temporary to force order of execution of calls to
5581 `force_operand'. */
5582 rtx tmp;
5583 rtx op2;
5584 /* Use subtarget as the target for operand 0 of a binary operation. */
5585 rtx subtarget = get_subtarget (target);
5587 /* Check for a PIC address load. */
5588 if (flag_pic
5589 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5590 && XEXP (value, 0) == pic_offset_table_rtx
5591 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5592 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5593 || GET_CODE (XEXP (value, 1)) == CONST))
5595 if (!subtarget)
5596 subtarget = gen_reg_rtx (GET_MODE (value));
5597 emit_move_insn (subtarget, value);
5598 return subtarget;
5601 if (GET_CODE (value) == PLUS)
5602 binoptab = add_optab;
5603 else if (GET_CODE (value) == MINUS)
5604 binoptab = sub_optab;
5605 else if (GET_CODE (value) == MULT)
5607 op2 = XEXP (value, 1);
5608 if (!CONSTANT_P (op2)
5609 && !(GET_CODE (op2) == REG && op2 != subtarget))
5610 subtarget = 0;
5611 tmp = force_operand (XEXP (value, 0), subtarget);
5612 return expand_mult (GET_MODE (value), tmp,
5613 force_operand (op2, NULL_RTX),
5614 target, 1);
5619 op2 = XEXP (value, 1);
5620 if (!CONSTANT_P (op2)
5621 && !(GET_CODE (op2) == REG && op2 != subtarget))
5622 subtarget = 0;
5623 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5625 binoptab = add_optab;
5626 op2 = negate_rtx (GET_MODE (value), op2);
5629 /* Check for an addition with OP2 a constant integer and our first
5630 operand a PLUS of a virtual register and something else. In that
5631 case, we want to emit the sum of the virtual register and the
5632 constant first and then add the other value. This allows virtual
5633 register instantiation to simply modify the constant rather than
5634 creating another one around this addition. */
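/* Sketch with hypothetical operands: for
   (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can later
   rewrite as a simple frame-pointer offset, and only then add the
   register term. */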
5635 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5636 && GET_CODE (XEXP (value, 0)) == PLUS
5637 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5638 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5639 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5641 rtx temp = expand_binop (GET_MODE (value), binoptab,
5642 XEXP (XEXP (value, 0), 0), op2,
5643 subtarget, 0, OPTAB_LIB_WIDEN);
5644 return expand_binop (GET_MODE (value), binoptab, temp,
5645 force_operand (XEXP (XEXP (value, 0), 1), 0),
5646 target, 0, OPTAB_LIB_WIDEN);
5649 tmp = force_operand (XEXP (value, 0), subtarget);
5650 return expand_binop (GET_MODE (value), binoptab, tmp,
5651 force_operand (op2, NULL_RTX),
5652 target, 0, OPTAB_LIB_WIDEN);
5653 /* We give UNSIGNEDP = 0 to expand_binop
5654 because the only operations we are expanding here are signed ones. */
5659 /* Subroutine of expand_expr: return nonzero iff there is no way that
5660 EXP can reference X, which is being modified. TOP_P is nonzero if this
5661 call is going to be used to determine whether we need a temporary
5662 for EXP, as opposed to a recursive call to this function.
5664 It is always safe for this routine to return zero since it merely
5665 searches for optimization opportunities. */
5667 static int
5668 safe_from_p (x, exp, top_p)
5669 rtx x;
5670 tree exp;
5671 int top_p;
5673 rtx exp_rtl = 0;
5674 int i, nops;
5675 static tree save_expr_list;
5677 if (x == 0
5678 /* If EXP has varying size, we MUST use a target since we currently
5679 have no way of allocating temporaries of variable size
5680 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5681 So we assume here that something at a higher level has prevented a
5682 clash. This is somewhat bogus, but the best we can do. Only
5683 do this when X is BLKmode and when we are at the top level. */
5684 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5685 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5686 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5687 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5688 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5689 != INTEGER_CST)
5690 && GET_MODE (x) == BLKmode)
5691 /* If X is in the outgoing argument area, it is always safe. */
5692 || (GET_CODE (x) == MEM
5693 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5694 || (GET_CODE (XEXP (x, 0)) == PLUS
5695 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5696 return 1;
5698 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5699 find the underlying pseudo. */
5700 if (GET_CODE (x) == SUBREG)
5702 x = SUBREG_REG (x);
5703 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5704 return 0;
5707 /* A SAVE_EXPR might appear many times in the expression passed to the
5708 top-level safe_from_p call, and if it has a complex subexpression,
5709 examining it multiple times could result in a combinatorial explosion.
5710 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5711 with optimization took about 28 minutes to compile -- even though it was
5712 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5713 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5714 we have processed. Note that the only test of top_p was above. */
5716 if (top_p)
5718 int rtn;
5719 tree t;
5721 save_expr_list = 0;
5723 rtn = safe_from_p (x, exp, 0);
5725 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5726 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5728 return rtn;
5731 /* Now look at our tree code and possibly recurse. */
5732 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5734 case 'd':
5735 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5736 break;
5738 case 'c':
5739 return 1;
5741 case 'x':
5742 if (TREE_CODE (exp) == TREE_LIST)
5743 return ((TREE_VALUE (exp) == 0
5744 || safe_from_p (x, TREE_VALUE (exp), 0))
5745 && (TREE_CHAIN (exp) == 0
5746 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5747 else if (TREE_CODE (exp) == ERROR_MARK)
5748 return 1; /* An already-visited SAVE_EXPR? */
5749 else
5750 return 0;
5752 case '1':
5753 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5755 case '2':
5756 case '<':
5757 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5758 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5760 case 'e':
5761 case 'r':
5762 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5763 the expression. If it is set, we conflict iff we are that rtx or
5764 both are in memory. Otherwise, we check all operands of the
5765 expression recursively. */
5767 switch (TREE_CODE (exp))
5769 case ADDR_EXPR:
5770 /* If the operand is static or we are static, we can't conflict.
5771 Likewise if we don't conflict with the operand at all. */
5772 if (staticp (TREE_OPERAND (exp, 0))
5773 || TREE_STATIC (exp)
5774 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5775 return 1;
5777 /* Otherwise, the only way this can conflict is if we are taking
5778 the address of a DECL whose address is part of X, which is
5779 very rare. */
5780 exp = TREE_OPERAND (exp, 0);
5781 if (DECL_P (exp))
5783 if (!DECL_RTL_SET_P (exp)
5784 || GET_CODE (DECL_RTL (exp)) != MEM)
5785 return 0;
5786 else
5787 exp_rtl = XEXP (DECL_RTL (exp), 0);
5789 break;
5791 case INDIRECT_REF:
5792 if (GET_CODE (x) == MEM
5793 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5794 get_alias_set (exp)))
5795 return 0;
5796 break;
5798 case CALL_EXPR:
5799 /* Assume that the call will clobber all hard registers and
5800 all of memory. */
5801 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5802 || GET_CODE (x) == MEM)
5803 return 0;
5804 break;
5806 case RTL_EXPR:
5807 /* If a sequence exists, we would have to scan every instruction
5808 in the sequence to see if it was safe. This is probably not
5810 if (RTL_EXPR_SEQUENCE (exp))
5811 return 0;
5813 exp_rtl = RTL_EXPR_RTL (exp);
5814 break;
5816 case WITH_CLEANUP_EXPR:
5817 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5818 break;
5820 case CLEANUP_POINT_EXPR:
5821 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5823 case SAVE_EXPR:
5824 exp_rtl = SAVE_EXPR_RTL (exp);
5825 if (exp_rtl)
5826 break;
5828 /* If we've already scanned this, don't do it again. Otherwise,
5829 show we've scanned it and record for clearing the flag if we're
5830 going on. */
5831 if (TREE_PRIVATE (exp))
5832 return 1;
5834 TREE_PRIVATE (exp) = 1;
5835 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5837 TREE_PRIVATE (exp) = 0;
5838 return 0;
5841 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5842 return 1;
5844 case BIND_EXPR:
5845 /* The only operand we look at is operand 1. The rest aren't
5846 part of the expression. */
5847 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5849 case METHOD_CALL_EXPR:
5850 /* This takes an rtx argument, but shouldn't appear here. */
5851 abort ();
5853 default:
5854 break;
5857 /* If we have an rtx, we do not need to scan our operands. */
5858 if (exp_rtl)
5859 break;
5861 nops = first_rtl_op (TREE_CODE (exp));
5862 for (i = 0; i < nops; i++)
5863 if (TREE_OPERAND (exp, i) != 0
5864 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5865 return 0;
5867 /* If this is a language-specific tree code, it may require
5868 special handling. */
5869 if ((unsigned int) TREE_CODE (exp)
5870 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5871 && lang_safe_from_p
5872 && !(*lang_safe_from_p) (x, exp))
5873 return 0;
5876 /* If we have an rtl, find any enclosed object. Then see if we conflict
5877 with it. */
5878 if (exp_rtl)
5880 if (GET_CODE (exp_rtl) == SUBREG)
5882 exp_rtl = SUBREG_REG (exp_rtl);
5883 if (GET_CODE (exp_rtl) == REG
5884 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5885 return 0;
5888 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5889 are memory and they conflict. */
5890 return ! (rtx_equal_p (x, exp_rtl)
5891 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5892 && true_dependence (exp_rtl, GET_MODE (x), x,
5893 rtx_addr_varies_p)));
5896 /* If we reach here, it is safe. */
5897 return 1;
5900 /* Subroutine of expand_expr: return rtx if EXP is a
5901 variable or parameter; else return 0. */
5903 static rtx
5904 var_rtx (exp)
5905 tree exp;
5907 STRIP_NOPS (exp);
5908 switch (TREE_CODE (exp))
5910 case VAR_DECL:
5911 case PARM_DECL:
5912 return DECL_RTL (exp);
5913 default:
5914 return 0;
5918 #ifdef MAX_INTEGER_COMPUTATION_MODE
5920 void
5921 check_max_integer_computation_mode (exp)
5922 tree exp;
5924 enum tree_code code;
5925 enum machine_mode mode;
5927 /* Strip any NOPs that don't change the mode. */
5928 STRIP_SIGN_NOPS (exp);
5929 code = TREE_CODE (exp);
5931 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5932 if (code == NOP_EXPR
5933 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5934 return;
5936 /* First check the type of the overall operation. We need only look at
5937 unary, binary and relational operations. */
5938 if (TREE_CODE_CLASS (code) == '1'
5939 || TREE_CODE_CLASS (code) == '2'
5940 || TREE_CODE_CLASS (code) == '<')
5942 mode = TYPE_MODE (TREE_TYPE (exp));
5943 if (GET_MODE_CLASS (mode) == MODE_INT
5944 && mode > MAX_INTEGER_COMPUTATION_MODE)
5945 internal_error ("unsupported wide integer operation");
5948 /* Check operand of a unary op. */
5949 if (TREE_CODE_CLASS (code) == '1')
5951 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5952 if (GET_MODE_CLASS (mode) == MODE_INT
5953 && mode > MAX_INTEGER_COMPUTATION_MODE)
5954 internal_error ("unsupported wide integer operation");
5957 /* Check operands of a binary/comparison op. */
5958 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5960 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5961 if (GET_MODE_CLASS (mode) == MODE_INT
5962 && mode > MAX_INTEGER_COMPUTATION_MODE)
5963 internal_error ("unsupported wide integer operation");
5965 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5966 if (GET_MODE_CLASS (mode) == MODE_INT
5967 && mode > MAX_INTEGER_COMPUTATION_MODE)
5968 internal_error ("unsupported wide integer operation");
5973 /* Return the highest power of two that EXP is known to be a multiple of.
5974 This is used in updating alignment of MEMs in array references. */
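/* Worked example: for the offset expression I * 8 + 4, the MULT_EXPR
   term yields 1 * 8 == 8 (I itself contributes the default factor of
   1) and the constant yields 4, so the PLUS_EXPR case returns
   MIN (8, 4) == 4: the address is known to be 4-byte aligned. */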
5976 static HOST_WIDE_INT
5977 highest_pow2_factor (exp)
5978 tree exp;
5980 HOST_WIDE_INT c0, c1;
5982 switch (TREE_CODE (exp))
5984 case INTEGER_CST:
5985 /* If the integer is expressible in a HOST_WIDE_INT, we can find
5986 the lowest bit that's a one. If the result is zero or negative,
5987 pessimize by returning 1. This is overly-conservative, but such
5988 things should not happen in the offset expressions that we are
5989 called with. */
5990 if (host_integerp (exp, 0))
5992 c0 = tree_low_cst (exp, 0);
5993 return c0 >= 0 ? c0 & -c0 : 1;
5995 break;
5997 case PLUS_EXPR: case MINUS_EXPR:
5998 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5999 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6000 return MIN (c0, c1);
6002 case MULT_EXPR:
6003 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6004 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6005 return c0 * c1;
6007 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6008 case CEIL_DIV_EXPR:
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6011 return MAX (1, c0 / c1);
6013 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6014 case COMPOUND_EXPR: case SAVE_EXPR:
6015 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6017 case COND_EXPR:
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6019 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6020 return MIN (c0, c1);
6022 default:
6023 break;
6026 return 1;
6029 /* Return an object on the placeholder list that matches EXP, a
6030 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6031 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6032 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6033 is a location which initially points to a starting location in the
6034 placeholder list (zero means start of the list) and where a pointer into
6035 the placeholder list at which the object is found is placed. */
6037 tree
6038 find_placeholder (exp, plist)
6039 tree exp;
6040 tree *plist;
6042 tree type = TREE_TYPE (exp);
6043 tree placeholder_expr;
6045 for (placeholder_expr
6046 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6047 placeholder_expr != 0;
6048 placeholder_expr = TREE_CHAIN (placeholder_expr))
6050 tree need_type = TYPE_MAIN_VARIANT (type);
6051 tree elt;
6053 /* Find the outermost reference that is of the type we want. If none,
6054 see if any object has a type that is a pointer to the type we
6056 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6057 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6058 || TREE_CODE (elt) == COND_EXPR)
6059 ? TREE_OPERAND (elt, 1)
6060 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6061 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6062 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6063 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6064 ? TREE_OPERAND (elt, 0) : 0))
6065 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6067 if (plist)
6068 *plist = placeholder_expr;
6069 return elt;
6072 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6073 elt
6074 = ((TREE_CODE (elt) == COMPOUND_EXPR
6075 || TREE_CODE (elt) == COND_EXPR)
6076 ? TREE_OPERAND (elt, 1)
6077 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6081 ? TREE_OPERAND (elt, 0) : 0))
6082 if (POINTER_TYPE_P (TREE_TYPE (elt))
6083 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6084 == need_type))
6086 if (plist)
6087 *plist = placeholder_expr;
6088 return build1 (INDIRECT_REF, need_type, elt);
6092 return 0;
6095 /* expand_expr: generate code for computing expression EXP.
6096 An rtx for the computed value is returned. The value is never null.
6097 In the case of a void EXP, const0_rtx is returned.
6099 The value may be stored in TARGET if TARGET is nonzero.
6100 TARGET is just a suggestion; callers must assume that
6101 the rtx returned may not be the same as TARGET.
6103 If TARGET is CONST0_RTX, it means that the value will be ignored.
6105 If TMODE is not VOIDmode, it suggests generating the
6106 result in mode TMODE. But this is done only when convenient.
6107 Otherwise, TMODE is ignored and the value generated in its natural mode.
6108 TMODE is just a suggestion; callers must assume that
6109 the rtx returned may not have mode TMODE.
6111 Note that TARGET may have neither TMODE nor MODE. In that case, it
6112 probably will not be used.
6114 If MODIFIER is EXPAND_SUM then when EXP is an addition
6115 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6116 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6117 products as above, or REG or MEM, or constant.
6118 Ordinarily in such cases we would output mul or add instructions
6119 and then return a pseudo reg containing the sum.
6121 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6122 it also marks a label as absolutely required (it can't be dead).
6123 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6124 This is used for outputting expressions used in initializers.
6126 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6127 with a constant address even if that address is not normally legitimate.
6128 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
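/* For instance, under EXPAND_SUM the address &a[i] may legitimately
   come back as (plus (reg) (mult (reg) (const_int 4))) for the caller
   to fold into an addressing mode, rather than being reduced to a
   single pseudo by explicit mul/add insns. (Illustrative rtx only.) */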
6130 rtx
6131 expand_expr (exp, target, tmode, modifier)
6132 tree exp;
6133 rtx target;
6134 enum machine_mode tmode;
6135 enum expand_modifier modifier;
6138 tree type = TREE_TYPE (exp);
6139 int unsignedp = TREE_UNSIGNED (type);
6140 enum machine_mode mode;
6141 enum tree_code code = TREE_CODE (exp);
6142 rtx op0, op1, temp;
6143 rtx subtarget, original_target;
6144 int ignore;
6145 tree context;
6146 /* Used by check-memory-usage to make modifier read only. */
6147 enum expand_modifier ro_modifier;
6149 /* Handle ERROR_MARK before anybody tries to access its type. */
6150 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6152 op0 = CONST0_RTX (tmode);
6153 if (op0 != 0)
6154 return op0;
6155 return const0_rtx;
6158 mode = TYPE_MODE (type);
6159 /* Use subtarget as the target for operand 0 of a binary operation. */
6160 subtarget = get_subtarget (target);
6161 original_target = target;
6162 ignore = (target == const0_rtx
6163 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6164 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6165 || code == COND_EXPR)
6166 && TREE_CODE (type) == VOID_TYPE));
6168 /* Make a read-only version of the modifier. */
6169 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6170 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6171 ro_modifier = modifier;
6173 ro_modifier = EXPAND_NORMAL;
6175 /* If we are going to ignore this result, we need only do something
6176 if there is a side-effect somewhere in the expression. If there
6177 is, short-circuit the most common cases here. Note that we must
6178 not call expand_expr with anything but const0_rtx in case this
6179 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6181 if (ignore)
6183 if (! TREE_SIDE_EFFECTS (exp))
6184 return const0_rtx;
6186 /* Ensure we reference a volatile object even if value is ignored, but
6187 don't do this if all we are doing is taking its address. */
6188 if (TREE_THIS_VOLATILE (exp)
6189 && TREE_CODE (exp) != FUNCTION_DECL
6190 && mode != VOIDmode && mode != BLKmode
6191 && modifier != EXPAND_CONST_ADDRESS)
6193 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6194 if (GET_CODE (temp) == MEM)
6195 temp = copy_to_reg (temp);
6196 return const0_rtx;
6199 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6200 || code == INDIRECT_REF || code == BUFFER_REF)
6201 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6202 VOIDmode, ro_modifier);
6203 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6204 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6206 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6207 ro_modifier);
6208 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6209 ro_modifier);
6210 return const0_rtx;
6212 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6213 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6214 /* If the second operand has no side effects, just evaluate
6215 the first. */
6216 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6217 VOIDmode, ro_modifier);
6218 else if (code == BIT_FIELD_REF)
6220 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6221 ro_modifier);
6222 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6223 ro_modifier);
6224 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6225 ro_modifier);
6226 return const0_rtx;
6232 #ifdef MAX_INTEGER_COMPUTATION_MODE
6233 /* Only check stuff here if the mode we want is different from the mode
6234 of the expression; if it's the same, check_max_integer_computation_mode
6235 will handle it. Do we really need to check this stuff at all? */
6237 if (target
6238 && GET_MODE (target) != mode
6239 && TREE_CODE (exp) != INTEGER_CST
6240 && TREE_CODE (exp) != PARM_DECL
6241 && TREE_CODE (exp) != ARRAY_REF
6242 && TREE_CODE (exp) != ARRAY_RANGE_REF
6243 && TREE_CODE (exp) != COMPONENT_REF
6244 && TREE_CODE (exp) != BIT_FIELD_REF
6245 && TREE_CODE (exp) != INDIRECT_REF
6246 && TREE_CODE (exp) != CALL_EXPR
6247 && TREE_CODE (exp) != VAR_DECL
6248 && TREE_CODE (exp) != RTL_EXPR)
6250 enum machine_mode mode = GET_MODE (target);
6252 if (GET_MODE_CLASS (mode) == MODE_INT
6253 && mode > MAX_INTEGER_COMPUTATION_MODE)
6254 internal_error ("unsupported wide integer operation");
6257 if (tmode != mode
6258 && TREE_CODE (exp) != INTEGER_CST
6259 && TREE_CODE (exp) != PARM_DECL
6260 && TREE_CODE (exp) != ARRAY_REF
6261 && TREE_CODE (exp) != ARRAY_RANGE_REF
6262 && TREE_CODE (exp) != COMPONENT_REF
6263 && TREE_CODE (exp) != BIT_FIELD_REF
6264 && TREE_CODE (exp) != INDIRECT_REF
6265 && TREE_CODE (exp) != VAR_DECL
6266 && TREE_CODE (exp) != CALL_EXPR
6267 && TREE_CODE (exp) != RTL_EXPR
6268 && GET_MODE_CLASS (tmode) == MODE_INT
6269 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6270 internal_error ("unsupported wide integer operation");
6272 check_max_integer_computation_mode (exp);
  /* If we will do cse, generate all results into pseudo registers,
     since (1) that allows cse to find more things
     and (2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }
      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = replace_equiv_address (DECL_RTL (exp),
                                      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }
      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */
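      /* For example, on a target whose PROMOTE_MODE widens HImode
         variables to SImode registers, DECL_RTL is an SImode REG; an
         HImode use gets (subreg:HI (reg:SI N) 0) with the promoted flags
         set so that later code knows the value is already extended.  */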
      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return replace_equiv_address (TREE_CST_RTL (exp),
                                      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
                                   fix_lexical_addr (XEXP (temp, 0), exp));
        }

      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;

    case PLACEHOLDER_EXPR:
      {
        tree old_list = placeholder_list;
        tree placeholder_expr = 0;

        exp = find_placeholder (exp, &placeholder_expr);
        if (exp != 0)
          {
            placeholder_list = TREE_CHAIN (placeholder_expr);
            temp = expand_expr (exp, original_target, tmode, ro_modifier);
            placeholder_list = old_list;
            return temp;
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (2);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);

          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             TREE_ADDRESSABLE (exp), 1, 1);

          store_constructor (exp, target, 0,
                             int_size_in_bytes (TREE_TYPE (exp)));
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_MEMORY_USE_WO)
          return
            GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (cfun && current_function_check_memory_usage
            && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
                                   Pmode, GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return
            GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */
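        /* E.g., given "static const int tbl[3] = {4, 5, 6};", the
           reference tbl[1] is replaced below by the constant 5 without
           ever addressing memory.  */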
        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;
            unsigned HOST_WIDE_INT i;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target,
                                  tmode, ro_modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return (GEN_INT
                              (TREE_STRING_POINTER
                               (init)[TREE_INT_CST_LOW (index)]));
                  }
              }
          }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
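                    /* E.g., a value read from an unsigned 3-bit field is
                       masked with (1 << 3) - 1 == 7; a signed field is
                       sign-extended instead by the shift pair below.  */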
                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        enum machine_mode imode
                          = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        unsigned int alignment;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to suffice.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                /* If the operand is a SAVE_EXPR, we can deal with this by
                   forcing the SAVE_EXPR into memory.  */
                if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                  {
                    put_var_into_stack (TREE_OPERAND (exp, 0));
                    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                  }
                else
                  {
                    tree nt
                      = build_qualified_type (TREE_TYPE (tem),
                                              (TYPE_QUALS (TREE_TYPE (tem))
                                               | TYPE_QUAL_CONST));
                    rtx memloc = assign_temp (nt, 1, 1, 1);

                    mark_temp_addr_taken (memloc);
                    emit_move_insn (memloc, op0);
                    op0 = memloc;
                  }
              }

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif
            /* A constant address in OP0 can have VOIDmode; we must not
               call force_reg in that case.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && alignment == GET_MODE_ALIGNMENT (mode1))
              {
                rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

                if (GET_CODE (XEXP (temp, 0)) == REG)
                  op0 = temp;
                else
                  op0 = (replace_equiv_address
                         (op0,
                          force_reg (GET_MODE (XEXP (temp, 0)),
                                     XEXP (temp, 0))));
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }
        /* Check the access.  */
        if (cfun != 0 && current_function_check_memory_usage
            && GET_CODE (op0) == MEM)
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                rtx to;
                int size;

                to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
                size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

                /* Check the access right of the pointer.  */
                in_check_memory_usage = 1;
                if (size > BITS_PER_UNIT)
                  emit_library_call (chkr_check_addr_libfunc,
                                     LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
                                     Pmode, GEN_INT (size / BITS_PER_UNIT),
                                     TYPE_MODE (sizetype),
                                     GEN_INT (memory_usage),
                                     TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && SLOW_UNALIGNED_ACCESS (mode1, alignment)
                && ((TYPE_ALIGN (TREE_TYPE (tem))
                     < GET_MODE_ALIGNMENT (mode))
                    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                    == INTEGER_CST)
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize))
            || (mode == BLKmode
                && SLOW_UNALIGNED_ACCESS (mode, alignment)
                && (TYPE_ALIGN (type) > alignment
                    || bitpos % TYPE_ALIGN (type) != 0)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && GET_CODE (op0) == MEM
                      && GET_CODE (target) == MEM
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 bitsize == -1 ? expr_size (exp)
                                 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                            / BITS_PER_UNIT));

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
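            /* E.g., extracting a 5-bit field into an SImode OP0 on a
               big-endian machine shifts the result left by 32 - 5 = 27
               bits, so the field ends up in the high end of the word.  */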
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
                                                TYPE_QUAL_CONST);
                rtx new = assign_temp (nt, 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
7277 rtx insn, before = get_last_insn (), vtbl_ref;
7279 /* Evaluate the interior expression. */
7280 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7283 /* Get or create an instruction off which to hang a note. */
7284 if (REG_P (subtarget))
7287 insn = get_last_insn ();
7290 if (! INSN_P (insn))
7291 insn = prev_nonnote_insn (insn);
7295 target = gen_reg_rtx (GET_MODE (subtarget));
7296 insn = emit_move_insn (target, subtarget);
7299 /* Collect the data for the note. */
7300 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7301 vtbl_ref = plus_constant (vtbl_ref,
7302 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7303 /* Discard the initial CONST that was added. */
7304 vtbl_ref = XEXP (vtbl_ref, 0);
7307 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7312 /* Intended for a reference to a buffer of a file-object in Pascal.
7313 But it's not certain that a special tree code will really be
7314 necessary for these. INDIRECT_REF might work for them. */
    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low % bits_per_word);
               the_word  = set [(index - rlo) / bits_per_word];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion
             isn't actually doing anything unless we need to make the
             alignment stricter.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
              && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
                  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
            return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                ro_modifier);

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, int_size_in_bytes (type), 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */
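      /* E.g., (x + 4) + fp becomes (fp + 4) + x here, so that when fp
         is later eliminated to sp plus an offset the two constants can
         be combined.  */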
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
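      /* E.g., with 4-byte ints, &ARR[8] expands to something like
         (plus (symbol_ref ARR) (const_int 32)), which plus_constant can
         fold into a single CONST instead of a run-time addition.  */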
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP0, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
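      /* E.g., a sum written as c + x*4 is reordered to x*4 + c, the
         canonical operand order for address arithmetic.  */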
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, ro_modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, ro_modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;
      goto binop;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
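          /* I.e., (x + c) * d becomes x*d + c*d, with the constant
             product outermost so it can take part in addressing.  */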
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return
              gen_rtx_PLUS
                (mode,
                 gen_rtx_MULT
                 (mode, XEXP (op0, 0),
                  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                          * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
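      /* E.g., (int) h1 * (int) h2 with HImode h1 and h2 can use a
         widening HImode-to-SImode multiply pattern (mulhisi3, where the
         machine provides one) instead of extending both operands.  */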
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then, if the divisor is constant, optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
         saving an expensive divide; if not, combine will rebuild the
         original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));
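      /* E.g., an unsigned MAX_EXPR maps to umax_optab here; if the
         machine has no such pattern, expand_binop returns 0 and we fall
         through to the compare-and-branch sequence below.  */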
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
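      /* That is, the emitted code behaves like
             temp = foo; if (temp == 0) goto done; temp = 1; done:
         so any nonzero value of foo is canonicalized to 1.  */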
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          if (temp != original_target)
            temp = copy_to_reg (temp);

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, 0, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
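      /* Hence !x can be computed as x ^ 1: 0 ^ 1 == 1 and 1 ^ 1 == 0.  */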
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, 0);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
               && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
                  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (iftrue),
                                               TREE_OPERAND (exp, 0),
                                               iftrue, iffalse)),
                                target, tmode, modifier);
        }
8183 /* Note that COND_EXPRs whose type is a structure or union
8184 are required to be constructed to contain assignments of
8185 a temporary variable, so that we can evaluate them here
8186 for side effect only. If type is void, we must do likewise. */
8188 /* If an arm of the branch requires a cleanup,
8189 only that cleanup is performed. */
8192 tree binary_op = 0, unary_op = 0;
8194 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8195 convert it to our mode, if necessary. */
8196 if (integer_onep (TREE_OPERAND (exp, 1))
8197 && integer_zerop (TREE_OPERAND (exp, 2))
8198 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8202 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8208 if (GET_MODE (op0) == mode)
8212 target = gen_reg_rtx (mode);
8213 convert_move (target, op0, unsignedp);
8217 /* Check for X ? A + B : A. If we have this, we can copy A to the
8218 output and conditionally add B. Similarly for unary operations.
8219 Don't do this if X has side-effects because those side effects
8220 might affect A or B and the "?" operation is a sequence point in
8221 ANSI. (operand_equal_p tests for side effects.) */
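/* For example, `c ? a + b : a' can be compiled (sketch) as

       target = a;
       if (c) target = target + b;

   whereas `(a = f ()) ? a + b : a' cannot, since evaluating the
   condition changes A.  */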
8223 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8224 && operand_equal_p (TREE_OPERAND (exp, 2),
8225 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8226 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8227 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8228 && operand_equal_p (TREE_OPERAND (exp, 1),
8229 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8230 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8231 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8232 && operand_equal_p (TREE_OPERAND (exp, 2),
8233 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8234 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8235 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8236 && operand_equal_p (TREE_OPERAND (exp, 1),
8237 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8238 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8240 /* If we are not to produce a result, we have no target. Otherwise,
8241 if a target was specified use it; it will not be used as an
8242 intermediate target unless it is safe. If no target, use a temporary. */
8247 else if (original_target
8248 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8249 || (singleton && GET_CODE (original_target) == REG
8250 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8251 && original_target == var_rtx (singleton)))
8252 && GET_MODE (original_target) == mode
8253 #ifdef HAVE_conditional_move
8254 && (! can_conditionally_move_p (mode)
8255 || GET_CODE (original_target) == REG
8256 || TREE_ADDRESSABLE (type))
8258 && (GET_CODE (original_target) != MEM
8259 || TREE_ADDRESSABLE (type)))
8260 temp = original_target;
8261 else if (TREE_ADDRESSABLE (type))
8264 temp = assign_temp (type, 0, 0, 1);
8266 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8267 do the test of X as a store-flag operation, do this as
8268 A + ((X != 0) << log C). Similarly for other simple binary
8269 operators. Only do for C == 1 if BRANCH_COST is low. */
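/* E.g. `c ? a + 8 : a' becomes, in effect, `a + ((c != 0) << 3)',
   since 8 == 1 << 3.  When BRANCH_COST is low, only `c ? a + 1 : a',
   which needs no shift, is handled this way.  */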
8270 if (temp && singleton && binary_op
8271 && (TREE_CODE (binary_op) == PLUS_EXPR
8272 || TREE_CODE (binary_op) == MINUS_EXPR
8273 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8274 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8275 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8276 : integer_onep (TREE_OPERAND (binary_op, 1)))
8277 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8280 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8281 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8282 ? addv_optab : add_optab)
8283 : TREE_CODE (binary_op) == MINUS_EXPR
8284 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8285 ? subv_optab : sub_optab)
8286 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8289 /* If we had X ? A : A + 1, do this as A + (X == 0).
8291 We have to invert the truth value here and then put it
8292 back later if do_store_flag fails. We cannot simply copy
8293 TREE_OPERAND (exp, 0) to another variable and modify that
8294 because invert_truthvalue can modify the tree pointed to by TREE_OPERAND (exp, 0). */
8296 if (singleton == TREE_OPERAND (exp, 1))
8297 TREE_OPERAND (exp, 0)
8298 = invert_truthvalue (TREE_OPERAND (exp, 0));
8300 result = do_store_flag (TREE_OPERAND (exp, 0),
8301 (safe_from_p (temp, singleton, 1)
8303 mode, BRANCH_COST <= 1);
8305 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8306 result = expand_shift (LSHIFT_EXPR, mode, result,
8307 build_int_2 (tree_log2
8311 (safe_from_p (temp, singleton, 1)
8312 ? temp : NULL_RTX), 0);
8316 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8317 return expand_binop (mode, boptab, op1, result, temp,
8318 unsignedp, OPTAB_LIB_WIDEN);
8320 else if (singleton == TREE_OPERAND (exp, 1))
8321 TREE_OPERAND (exp, 0)
8322 = invert_truthvalue (TREE_OPERAND (exp, 0));
8325 do_pending_stack_adjust ();
8327 op0 = gen_label_rtx ();
8329 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8333 /* If the target conflicts with the other operand of the
8334 binary op, we can't use it. Also, we can't use the target
8335 if it is a hard register, because evaluating the condition
8336 might clobber it. */
8338 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8339 || (GET_CODE (temp) == REG
8340 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8341 temp = gen_reg_rtx (mode);
8342 store_expr (singleton, temp, 0);
8345 expand_expr (singleton,
8346 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8347 if (singleton == TREE_OPERAND (exp, 1))
8348 jumpif (TREE_OPERAND (exp, 0), op0);
8350 jumpifnot (TREE_OPERAND (exp, 0), op0);
8352 start_cleanup_deferral ();
8353 if (binary_op && temp == 0)
8354 /* Just touch the other operand. */
8355 expand_expr (TREE_OPERAND (binary_op, 1),
8356 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8358 store_expr (build (TREE_CODE (binary_op), type,
8359 make_tree (type, temp),
8360 TREE_OPERAND (binary_op, 1)),
8363 store_expr (build1 (TREE_CODE (unary_op), type,
8364 make_tree (type, temp)),
8368 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8369 comparison operator. If we have one of these cases, set the
8370 output to A, branch on A (cse will merge these two references),
8371 then set the output to FOO. */
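/* E.g. for `x > 0 ? x : y' (illustrative): store X in the output,
   branch on `x > 0' to the join point, and store Y in the output on
   the fall-through path.  */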
8373 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8374 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8375 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8376 TREE_OPERAND (exp, 1), 0)
8377 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8378 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8379 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8381 if (GET_CODE (temp) == REG
8382 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8383 temp = gen_reg_rtx (mode);
8384 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8385 jumpif (TREE_OPERAND (exp, 0), op0);
8387 start_cleanup_deferral ();
8388 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8392 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8393 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8394 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8395 TREE_OPERAND (exp, 2), 0)
8396 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8397 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8398 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8400 if (GET_CODE (temp) == REG
8401 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8402 temp = gen_reg_rtx (mode);
8403 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8404 jumpifnot (TREE_OPERAND (exp, 0), op0);
8406 start_cleanup_deferral ();
8407 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8412 op1 = gen_label_rtx ();
8413 jumpifnot (TREE_OPERAND (exp, 0), op0);
8415 start_cleanup_deferral ();
8417 /* One branch of the cond can be void if it never returns; for
8418 example, `A ? throw : E'. */
8420 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8421 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8423 expand_expr (TREE_OPERAND (exp, 1),
8424 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8425 end_cleanup_deferral ();
8427 emit_jump_insn (gen_jump (op1));
8430 start_cleanup_deferral ();
8432 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8433 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8435 expand_expr (TREE_OPERAND (exp, 2),
8436 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8439 end_cleanup_deferral ();
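/* The general case above thus emits, in sketch form:

       if (! cond) goto op0;
       temp = then-arm;   (or evaluate it just for side effects)
       goto op1;
     op0:
       temp = else-arm;
     op1:
   */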
8450 /* Something needs to be initialized, but we didn't know
8451 where that thing was when building the tree. For example,
8452 it could be the return value of a function, or a parameter
8453 to a function that is laid out on the stack, or a temporary
8454 variable which must be passed by reference.
8456 We guarantee that the expression will either be constructed
8457 or copied into our original target. */
8459 tree slot = TREE_OPERAND (exp, 0);
8460 tree cleanups = NULL_TREE;
8463 if (TREE_CODE (slot) != VAR_DECL)
8467 target = original_target;
8469 /* Set this here so that if we get a target that refers to a
8470 register variable that's already been used, put_reg_into_stack
8471 knows that it should fix up those uses. */
8472 TREE_USED (slot) = 1;
8476 if (DECL_RTL_SET_P (slot))
8478 target = DECL_RTL (slot);
8479 /* We have already expanded the slot, so don't do it again. */
8481 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8486 target = assign_temp (type, 2, 0, 1);
8487 /* All temp slots at this level must not conflict. */
8488 preserve_temp_slots (target);
8489 SET_DECL_RTL (slot, target);
8490 if (TREE_ADDRESSABLE (slot))
8491 put_var_into_stack (slot);
8493 /* Since SLOT is not known to the called function
8494 to belong to its stack frame, we must build an explicit
8495 cleanup. This case occurs when we must build up a reference
8496 to pass the reference as an argument. In this case,
8497 it is very likely that such a reference need not be built here. */
8500 if (TREE_OPERAND (exp, 2) == 0)
8501 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8502 cleanups = TREE_OPERAND (exp, 2);
8507 /* This case does occur when expanding a parameter which
8508 needs to be constructed on the stack. The target
8509 is the actual stack address that we want to initialize.
8510 The function we call will perform the cleanup in this case. */
8512 /* If we have already assigned it space, use that space,
8513 not the target that we were passed in, as our target
8514 parameter is only a hint. */
8515 if (DECL_RTL_SET_P (slot))
8517 target = DECL_RTL (slot);
8518 /* We have already expanded the slot, so don't do it again. */
8520 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8525 SET_DECL_RTL (slot, target);
8526 /* If we must have an addressable slot, then make sure that
8527 the RTL that we just stored in slot is OK. */
8528 if (TREE_ADDRESSABLE (slot))
8529 put_var_into_stack (slot);
8533 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8534 /* Mark it as expanded. */
8535 TREE_OPERAND (exp, 1) = NULL_TREE;
8537 store_expr (exp1, target, 0);
8539 expand_decl_cleanup (NULL_TREE, cleanups);
8546 tree lhs = TREE_OPERAND (exp, 0);
8547 tree rhs = TREE_OPERAND (exp, 1);
8549 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8555 /* If lhs is complex, expand calls in rhs before computing it.
8556 That's so we don't compute a pointer and save it over a
8557 call. If lhs is simple, compute it first so we can give it
8558 as a target if the rhs is just a call. This avoids an
8559 extra temp and copy and that prevents a partial-subsumption
8560 which makes bad code. Actually we could treat
8561 component_ref's of vars like vars. */
8563 tree lhs = TREE_OPERAND (exp, 0);
8564 tree rhs = TREE_OPERAND (exp, 1);
8568 /* Check for |= or &= of a bitfield of size one into another bitfield
8569 of size 1. In this case, (unless we need the result of the
8570 assignment) we can do this more efficiently with a
8571 test followed by an assignment, if necessary.
8573 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8574 things change so we do, this code should be enhanced to support it. */
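/* For example (illustrative), with one-bit bitfields `s.a |= s.b;'
   can be emitted as `if (s.b) s.a = 1;' and `s.a &= s.b;' as
   `if (! s.b) s.a = 0;', avoiding a read-modify-write of S.A.  */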
8577 && TREE_CODE (lhs) == COMPONENT_REF
8578 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8579 || TREE_CODE (rhs) == BIT_AND_EXPR)
8580 && TREE_OPERAND (rhs, 0) == lhs
8581 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8582 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8583 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8585 rtx label = gen_label_rtx ();
8587 do_jump (TREE_OPERAND (rhs, 1),
8588 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8589 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8590 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8591 (TREE_CODE (rhs) == BIT_IOR_EXPR
8593 : integer_zero_node)),
8595 do_pending_stack_adjust ();
8600 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8606 if (!TREE_OPERAND (exp, 0))
8607 expand_null_return ();
8609 expand_return (TREE_OPERAND (exp, 0));
8612 case PREINCREMENT_EXPR:
8613 case PREDECREMENT_EXPR:
8614 return expand_increment (exp, 0, ignore);
8616 case POSTINCREMENT_EXPR:
8617 case POSTDECREMENT_EXPR:
8618 /* Faster to treat as pre-increment if result is not used. */
8619 return expand_increment (exp, ! ignore, ignore);
8622 /* If nonzero, TEMP will be set to the address of something that might
8623 be a MEM corresponding to a stack slot. */
8626 /* Are we taking the address of a nested function? */
8627 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8628 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8629 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8630 && ! TREE_STATIC (exp))
8632 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8633 op0 = force_operand (op0, target);
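/* E.g. (GNU C, illustrative sketch):

       int outer (int x)
       {
         int inner (int y) { return x + y; }
         int (*fp) (int) = inner;
         return fp (1);
       }

   INNER reaches X through the static chain, so FP must point at a
   trampoline that loads the static chain register and then jumps to
   the real INNER.  */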
8635 /* If we are taking the address of something erroneous, just return a zero. */
8637 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8639 /* If we are taking the address of a constant and are at the
8640 top level, we have to use output_constant_def since we can't
8641 call force_const_mem at top level. */
8643 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8644 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8646 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8649 /* We make sure to pass const0_rtx down if we came in with
8650 ignore set, to avoid doing the cleanups twice for something. */
8651 op0 = expand_expr (TREE_OPERAND (exp, 0),
8652 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8653 (modifier == EXPAND_INITIALIZER
8654 ? modifier : EXPAND_CONST_ADDRESS));
8656 /* If we are going to ignore the result, OP0 will have been set
8657 to const0_rtx, so just return it. Don't get confused and
8658 think we are taking the address of the constant. */
8662 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8663 clever and return a REG when given a MEM. */
8664 op0 = protect_from_queue (op0, 1);
8666 /* We would like the object in memory. If it is a constant, we can
8667 have it be statically allocated into memory. For a non-constant,
8668 we need to allocate some memory and store the value into it. */
8670 if (CONSTANT_P (op0))
8671 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8673 else if (GET_CODE (op0) == MEM)
8675 mark_temp_addr_taken (op0);
8676 temp = XEXP (op0, 0);
8679 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8680 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8681 || GET_CODE (op0) == PARALLEL)
8683 /* If this object is in a register, it has no address; copy it into a memory temporary so it does. */
8685 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8686 tree nt = build_qualified_type (inner_type,
8687 (TYPE_QUALS (inner_type)
8688 | TYPE_QUAL_CONST));
8689 rtx memloc = assign_temp (nt, 1, 1, 1);
8691 mark_temp_addr_taken (memloc);
8692 if (GET_CODE (op0) == PARALLEL)
8693 /* Handle calls that pass values in multiple non-contiguous
8694 locations. The Irix 6 ABI has examples of this. */
8695 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8697 emit_move_insn (memloc, op0);
8701 if (GET_CODE (op0) != MEM)
8704 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8706 temp = XEXP (op0, 0);
8707 #ifdef POINTERS_EXTEND_UNSIGNED
8708 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8709 && mode == ptr_mode)
8710 temp = convert_memory_address (ptr_mode, temp);
8715 op0 = force_operand (XEXP (op0, 0), target);
8718 if (flag_force_addr && GET_CODE (op0) != REG)
8719 op0 = force_reg (Pmode, op0);
8721 if (GET_CODE (op0) == REG
8722 && ! REG_USERVAR_P (op0))
8723 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8725 /* If we might have had a temp slot, add an equivalent address for it. */
8728 update_temp_slot_address (temp, op0);
8730 #ifdef POINTERS_EXTEND_UNSIGNED
8731 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8732 && mode == ptr_mode)
8733 op0 = convert_memory_address (ptr_mode, op0);
8738 case ENTRY_VALUE_EXPR:
8741 /* COMPLEX type for Extended Pascal & Fortran */
8744 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8747 /* Get the rtx code of the operands. */
8748 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8749 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8752 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8756 /* Move the real (op0) and imaginary (op1) parts to their location. */
8757 emit_move_insn (gen_realpart (mode, target), op0);
8758 emit_move_insn (gen_imagpart (mode, target), op1);
8760 insns = get_insns ();
8763 /* Complex construction should appear as a single unit. */
8764 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8765 each with a separate pseudo as destination.
8766 It's not correct for flow to treat them as a unit. */
8767 if (GET_CODE (target) != CONCAT)
8768 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8776 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8777 return gen_realpart (mode, op0);
8780 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8781 return gen_imagpart (mode, op0);
8785 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8789 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8792 target = gen_reg_rtx (mode);
8796 /* Store the realpart and the negated imagpart to target. */
8797 emit_move_insn (gen_realpart (partmode, target),
8798 gen_realpart (partmode, op0));
8800 imag_t = gen_imagpart (partmode, target);
8801 temp = expand_unop (partmode,
8802 ! unsignedp && flag_trapv
8803 && (GET_MODE_CLASS(partmode) == MODE_INT)
8804 ? negv_optab : neg_optab,
8805 gen_imagpart (partmode, op0), imag_t, 0);
8807 emit_move_insn (imag_t, temp);
8809 insns = get_insns ();
8812 /* Conjugate should appear as a single unit.
8813 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8814 each with a separate pseudo as destination.
8815 It's not correct for flow to treat them as a unit. */
8816 if (GET_CODE (target) != CONCAT)
8817 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8824 case TRY_CATCH_EXPR:
8826 tree handler = TREE_OPERAND (exp, 1);
8828 expand_eh_region_start ();
8830 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8832 expand_eh_region_end_cleanup (handler);
8837 case TRY_FINALLY_EXPR:
8839 tree try_block = TREE_OPERAND (exp, 0);
8840 tree finally_block = TREE_OPERAND (exp, 1);
8841 rtx finally_label = gen_label_rtx ();
8842 rtx done_label = gen_label_rtx ();
8843 rtx return_link = gen_reg_rtx (Pmode);
8844 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8845 (tree) finally_label, (tree) return_link);
8846 TREE_SIDE_EFFECTS (cleanup) = 1;
8848 /* Start a new binding layer that will keep track of all cleanup
8849 actions to be performed. */
8850 expand_start_bindings (2);
8852 target_temp_slot_level = temp_slot_level;
8854 expand_decl_cleanup (NULL_TREE, cleanup);
8855 op0 = expand_expr (try_block, target, tmode, modifier);
8857 preserve_temp_slots (op0);
8858 expand_end_bindings (NULL_TREE, 0, 0);
8859 emit_jump (done_label);
8860 emit_label (finally_label);
8861 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8862 emit_indirect_jump (return_link);
8863 emit_label (done_label);
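/* The code emitted above behaves like this sketch (using GNU C
   computed-goto notation):

       try-block;
       return_link = &&resume; goto finally;   (the registered cleanup)
     resume:
       goto done;
     finally:
       finally-block;
       goto *return_link;
     done:
   */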
8867 case GOTO_SUBROUTINE_EXPR:
8869 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8870 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8871 rtx return_address = gen_label_rtx ();
8872 emit_move_insn (return_link,
8873 gen_rtx_LABEL_REF (Pmode, return_address));
8875 emit_label (return_address);
8880 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8883 return get_exception_pointer (cfun);
8886 /* Function descriptors are not valid except for as
8887 initialization constants, and should not be expanded. */
8891 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8894 /* Here to do an ordinary binary operator, generating an instruction
8895 from the optab already placed in `this_optab'. */
8897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8899 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8900 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8902 temp = expand_binop (mode, this_optab, op0, op1, target,
8903 unsignedp, OPTAB_LIB_WIDEN);
8909 /* Similar to expand_expr, except that we don't specify a target, target
8910 mode, or modifier and we return the alignment of the inner type. This is
8911 used in cases where it is not necessary to align the result to the
8912 alignment of its type as long as we know the alignment of the result, for
8913 example for comparisons of BLKmode values. */
8916 expand_expr_unaligned (exp, palign)
8918 unsigned int *palign;
8921 tree type = TREE_TYPE (exp);
8922 enum machine_mode mode = TYPE_MODE (type);
8924 /* Default the alignment we return to that of the type. */
8925 *palign = TYPE_ALIGN (type);
8927 /* The only cases in which we do anything special are those where the resulting mode is BLKmode. */
8929 if (mode != BLKmode)
8930 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8932 switch (TREE_CODE (exp))
8936 case NON_LVALUE_EXPR:
8937 /* Conversions between BLKmode values don't change the underlying
8938 alignment or value. */
8939 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8940 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8944 /* Much of the code for this case is copied directly from expand_expr.
8945 We need to duplicate it here because we will do something different
8946 in the fall-through case, so we need to handle the same exceptions
8949 tree array = TREE_OPERAND (exp, 0);
8950 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8951 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8952 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8955 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8958 /* Optimize the special-case of a zero lower bound.
8960 We convert the low_bound to sizetype to avoid some problems
8961 with constant folding. (E.g. suppose the lower bound is 1,
8962 and its mode is QI. Without the conversion, (ARRAY
8963 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8964 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8966 if (! integer_zerop (low_bound))
8967 index = size_diffop (index, convert (sizetype, low_bound));
8969 /* If this is a constant index into a constant array,
8970 just get the value from the array. Handle both the cases when
8971 we have an explicit constructor and when our operand is a variable
8972 that was declared const. */
8974 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8975 && host_integerp (index, 0)
8976 && 0 > compare_tree_int (index,
8977 list_length (CONSTRUCTOR_ELTS
8978 (TREE_OPERAND (exp, 0)))))
8982 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8983 i = tree_low_cst (index, 0);
8984 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8988 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8991 else if (optimize >= 1
8992 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8993 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8994 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8996 if (TREE_CODE (index) == INTEGER_CST)
8998 tree init = DECL_INITIAL (array);
9000 if (TREE_CODE (init) == CONSTRUCTOR)
9004 for (elem = CONSTRUCTOR_ELTS (init);
9005 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9006 elem = TREE_CHAIN (elem))
9010 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9020 case ARRAY_RANGE_REF:
9021 /* If the operand is a CONSTRUCTOR, we can just extract the
9022 appropriate field if it is present. Don't do this if we have
9023 already written the data since we want to refer to that copy
9024 and varasm.c assumes that's what we'll do. */
9025 if (TREE_CODE (exp) == COMPONENT_REF
9026 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9027 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9031 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9032 elt = TREE_CHAIN (elt))
9033 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9034 /* Note that unlike the case in expand_expr, we know this is
9035 BLKmode and hence not an integer. */
9036 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9040 enum machine_mode mode1;
9041 HOST_WIDE_INT bitsize, bitpos;
9044 unsigned int alignment;
9046 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9047 &mode1, &unsignedp, &volatilep,
9050 /* If we got back the original object, something is wrong. Perhaps
9051 we are evaluating an expression too early. In any event, don't
9052 infinitely recurse. */
9056 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9058 /* If this is a constant, put it into a register if it is a
9059 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
9060 if (CONSTANT_P (op0))
9062 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9064 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9066 op0 = force_reg (inner_mode, op0);
9068 op0 = validize_mem (force_const_mem (inner_mode, op0));
9073 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9075 /* If this object is in a register, put it into memory.
9076 This case can't occur in C, but can in Ada if we have
9077 unchecked conversion of an expression from a scalar type to
9078 an array or record type. */
9079 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9080 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9082 tree nt = build_qualified_type (TREE_TYPE (tem),
9083 (TYPE_QUALS (TREE_TYPE (tem))
9084 | TYPE_QUAL_CONST));
9085 rtx memloc = assign_temp (nt, 1, 1, 1);
9087 mark_temp_addr_taken (memloc);
9088 emit_move_insn (memloc, op0);
9092 if (GET_CODE (op0) != MEM)
9095 if (GET_MODE (offset_rtx) != ptr_mode)
9096 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9098 #ifdef POINTERS_EXTEND_UNSIGNED
9099 if (GET_MODE (offset_rtx) != Pmode)
9100 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9103 op0 = offset_address (op0, offset_rtx,
9104 highest_pow2_factor (offset));
9107 /* Don't forget about volatility even if this is a bitfield. */
9108 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9110 op0 = copy_rtx (op0);
9111 MEM_VOLATILE_P (op0) = 1;
9114 /* Check the access. */
9115 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9120 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9121 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9123 /* Check the access right of the pointer. */
9124 in_check_memory_usage = 1;
9125 if (size > BITS_PER_UNIT)
9126 emit_library_call (chkr_check_addr_libfunc,
9127 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9128 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9129 TYPE_MODE (sizetype),
9130 GEN_INT (MEMORY_USE_RO),
9131 TYPE_MODE (integer_type_node));
9132 in_check_memory_usage = 0;
9135 /* In cases where an aligned union has an unaligned object
9136 as a field, we might be extracting a BLKmode value from
9137 an integer-mode (e.g., SImode) object. Handle this case
9138 by doing the extract into an object as wide as the field
9139 (which we know to be the width of a basic mode), then
9140 storing into memory, and changing the mode to BLKmode.
9141 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9142 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9143 if (mode1 == VOIDmode
9144 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9145 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9146 && (TYPE_ALIGN (type) > alignment
9147 || bitpos % TYPE_ALIGN (type) != 0)))
9149 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9151 if (ext_mode == BLKmode)
9153 /* In this case, BITPOS must start at a byte boundary. */
9154 if (GET_CODE (op0) != MEM
9155 || bitpos % BITS_PER_UNIT != 0)
9158 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9162 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9164 rtx new = assign_temp (nt, 0, 1, 1);
9166 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9167 unsignedp, NULL_RTX, ext_mode,
9169 int_size_in_bytes (TREE_TYPE (tem)));
9171 /* If the result is a record type and BITSIZE is narrower than
9172 the mode of OP0, an integral mode, and this is a big endian
9173 machine, we must put the field into the high-order bits. */
9174 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9175 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9176 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9177 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9178 size_int (GET_MODE_BITSIZE
9183 emit_move_insn (new, op0);
9184 op0 = copy_rtx (new);
9185 PUT_MODE (op0, BLKmode);
9189 /* Get a reference to just this component. */
9190 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9192 set_mem_attributes (op0, exp, 0);
9194 /* Adjust the alignment in case the bit position is not
9195 a multiple of the alignment of the inner object. */
9196 while (bitpos % alignment != 0)
9199 if (GET_CODE (XEXP (op0, 0)) == REG)
9200 mark_reg_pointer (XEXP (op0, 0), alignment);
9202 MEM_IN_STRUCT_P (op0) = 1;
9203 MEM_VOLATILE_P (op0) |= volatilep;
9205 *palign = alignment;
9214 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9217 /* Return the tree node if ARG corresponds to a string constant, or zero
9218 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9219 in bytes within the string that ARG is accessing. The type of the
9220 offset will be `sizetype'. */
9223 string_constant (arg, ptr_offset)
9229 if (TREE_CODE (arg) == ADDR_EXPR
9230 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9232 *ptr_offset = size_zero_node;
9233 return TREE_OPERAND (arg, 0);
9235 else if (TREE_CODE (arg) == PLUS_EXPR)
9237 tree arg0 = TREE_OPERAND (arg, 0);
9238 tree arg1 = TREE_OPERAND (arg, 1);
9243 if (TREE_CODE (arg0) == ADDR_EXPR
9244 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9246 *ptr_offset = convert (sizetype, arg1);
9247 return TREE_OPERAND (arg0, 0);
9249 else if (TREE_CODE (arg1) == ADDR_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9252 *ptr_offset = convert (sizetype, arg0);
9253 return TREE_OPERAND (arg1, 0);
9260 /* Expand code for a post- or pre- increment or decrement
9261 and return the RTX for the result.
9262 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
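/* For example, when the result is used, `y = x++' is expanded with
   POST == 1 and yields the old value of X, while `y = ++x' is
   expanded with POST == 0 and yields the incremented value.  */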
9265 expand_increment (exp, post, ignore)
9271 tree incremented = TREE_OPERAND (exp, 0);
9272 optab this_optab = add_optab;
9274 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9275 int op0_is_copy = 0;
9276 int single_insn = 0;
9277 /* 1 means we can't store into OP0 directly,
9278 because it is a subreg narrower than a word,
9279 and we don't dare clobber the rest of the word. */
9282 /* Stabilize any component ref that might need to be
9283 evaluated more than once below. */
9285 || TREE_CODE (incremented) == BIT_FIELD_REF
9286 || (TREE_CODE (incremented) == COMPONENT_REF
9287 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9288 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9289 incremented = stabilize_reference (incremented);
9290 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9291 ones into save exprs so that they don't accidentally get evaluated
9292 more than once by the code below. */
9293 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9294 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9295 incremented = save_expr (incremented);
9297 /* Compute the operands as RTX.
9298 Note whether OP0 is the actual lvalue or a copy of it:
9299 I believe it is a copy iff it is a register or subreg
9300 and insns were generated in computing it. */
9302 temp = get_last_insn ();
9303 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9305 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9306 in place but instead must do sign- or zero-extension during assignment,
9307 so we copy it into a new register and let the code below use it as a copy.
9310 Note that we can safely modify this SUBREG since it is known not to be
9311 shared (it was made by the expand_expr call above). */
9313 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9316 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9320 else if (GET_CODE (op0) == SUBREG
9321 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9323 /* We cannot increment this SUBREG in place. If we are
9324 post-incrementing, get a copy of the old value. Otherwise,
9325 just mark that we cannot increment in place. */
9327 op0 = copy_to_reg (op0);
9332 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9333 && temp != get_last_insn ());
9334 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9335 EXPAND_MEMORY_USE_BAD);
9337 /* Decide whether incrementing or decrementing. */
9338 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9339 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9340 this_optab = sub_optab;
9342 /* Convert decrement by a constant into a negative increment. */
9343 if (this_optab == sub_optab
9344 && GET_CODE (op1) == CONST_INT)
9346 op1 = GEN_INT (-INTVAL (op1));
9347 this_optab = add_optab;
9350 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9351 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9353 /* For a preincrement, see if we can do this with a single instruction. */
9356 icode = (int) this_optab->handlers[(int) mode].insn_code;
9357 if (icode != (int) CODE_FOR_nothing
9358 /* Make sure that OP0 is valid for operands 0 and 1
9359 of the insn we want to queue. */
9360 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9361 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9362 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9366 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9367 then we cannot just increment OP0. We must therefore contrive to
9368 increment the original value. Then, for postincrement, we can return
9369 OP0 since it is a copy of the old value. For preincrement, expand here
9370 unless we can do it with a single insn.
9372 Likewise if storing directly into OP0 would clobber high bits
9373 we need to preserve (bad_subreg). */
9374 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9376 /* This is the easiest way to increment the value wherever it is.
9377 Problems with multiple evaluation of INCREMENTED are prevented
9378 because either (1) it is a component_ref or preincrement,
9379 in which case it was stabilized above, or (2) it is an array_ref
9380 with constant index in an array in a register, which is
9381 safe to reevaluate. */
9382 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9383 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9384 ? MINUS_EXPR : PLUS_EXPR),
9387 TREE_OPERAND (exp, 1));
9389 while (TREE_CODE (incremented) == NOP_EXPR
9390 || TREE_CODE (incremented) == CONVERT_EXPR)
9392 newexp = convert (TREE_TYPE (incremented), newexp);
9393 incremented = TREE_OPERAND (incremented, 0);
9396 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9397 return post ? op0 : temp;
9402 /* We have a true reference to the value in OP0.
9403 If there is an insn to add or subtract in this mode, queue it.
9404 Queueing the increment insn avoids the register shuffling
9405 that often results if we must increment now and first save
9406 the old value for subsequent use. */
9408 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9409 op0 = stabilize (op0);
9412 icode = (int) this_optab->handlers[(int) mode].insn_code;
9413 if (icode != (int) CODE_FOR_nothing
9414 /* Make sure that OP0 is valid for operands 0 and 1
9415 of the insn we want to queue. */
9416 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9417 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9419 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9420 op1 = force_reg (mode, op1);
9422 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9424 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9426 rtx addr = (general_operand (XEXP (op0, 0), mode)
9427 ? force_reg (Pmode, XEXP (op0, 0))
9428 : copy_to_reg (XEXP (op0, 0)));
9431 op0 = replace_equiv_address (op0, addr);
9432 temp = force_reg (GET_MODE (op0), op0);
9433 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9434 op1 = force_reg (mode, op1);
9436 /* The increment queue is LIFO, thus we have to `queue'
9437 the instructions in reverse order. */
9438 enqueue_insn (op0, gen_move_insn (op0, temp));
9439 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9444 /* Preincrement, or we can't increment with one simple insn. */
9446 /* Save a copy of the value before inc or dec, to return it later. */
9447 temp = value = copy_to_reg (op0);
9449 /* Arrange to return the incremented value. */
9450 /* Copy the rtx because expand_binop will protect from the queue,
9451 and the results of that would be invalid for us to return
9452 if our caller does emit_queue before using our result. */
9453 temp = copy_rtx (value = op0);
9455 /* Increment however we can. */
9456 op1 = expand_binop (mode, this_optab, value, op1,
9457 current_function_check_memory_usage ? NULL_RTX : op0,
9458 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9459 /* Make sure the value is stored into OP0. */
9461 emit_move_insn (op0, op1);
9466 /* At the start of a function, record that we have no previously-pushed
9467 arguments waiting to be popped. */
9470 init_pending_stack_adjust ()
9472 pending_stack_adjust = 0;
9475 /* When exiting from a function, if safe, clear out any pending stack adjust
9476 so the adjustment won't get done.
9478 Note, if the current function calls alloca, then it must have a
9479 frame pointer regardless of the value of flag_omit_frame_pointer. */
9482 clear_pending_stack_adjust ()
9484 #ifdef EXIT_IGNORE_STACK
9486 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9487 && EXIT_IGNORE_STACK
9488 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9489 && ! flag_inline_functions)
9491 stack_pointer_delta -= pending_stack_adjust,
9492 pending_stack_adjust = 0;
9497 /* Pop any previously-pushed arguments that have not been popped yet. */
9500 do_pending_stack_adjust ()
9502 if (inhibit_defer_pop == 0)
9504 if (pending_stack_adjust != 0)
9505 adjust_stack (GEN_INT (pending_stack_adjust));
9506 pending_stack_adjust = 0;
9510 /* Expand conditional expressions. */
9512 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9513 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9517 jumpifnot (exp, label)
9521 do_jump (exp, label, NULL_RTX);
9524 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9531 do_jump (exp, NULL_RTX, label);
9534 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9535 the result is zero, or IF_TRUE_LABEL if the result is one.
9536 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9537 meaning fall through in that case.
9539 do_jump always does any pending stack adjust except when it does not
9540 actually perform a jump. An example where there is no jump
9541 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9543 This function is responsible for optimizing cases such as
9544 &&, || and comparison operators in EXP. */
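/* For example, `if (a && b) stmt;' reaches here as
   TRUTH_ANDIF_EXPR (a, b) and is expanded as two conditional jumps,

       if (! a) goto false_label;
       if (! b) goto false_label;

   without ever materializing a 0-or-1 value for the whole test.  */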
9547 do_jump (exp, if_false_label, if_true_label)
9549 rtx if_false_label, if_true_label;
9551 enum tree_code code = TREE_CODE (exp);
9552 /* Some cases need to create a label to jump to
9553 in order to properly fall through.
9554 These cases set DROP_THROUGH_LABEL nonzero. */
9555 rtx drop_through_label = 0;
9559 enum machine_mode mode;
9561 #ifdef MAX_INTEGER_COMPUTATION_MODE
9562 check_max_integer_computation_mode (exp);
9573 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9579 /* This is not true with #pragma weak */
9581 /* The address of something can never be zero. */
9583 emit_jump (if_true_label);
9588 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9589 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9590 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9591 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9594 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9596 if ((TYPE_PRECISION (TREE_TYPE (exp))
9597 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9599 case NON_LVALUE_EXPR:
9600 case REFERENCE_EXPR:
9605 /* These cannot change zero->non-zero or vice versa. */
9606 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9609 case WITH_RECORD_EXPR:
9610 /* Put the object on the placeholder list, recurse through our first
9611 operand, and pop the list. */
9612 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9614 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9615 placeholder_list = TREE_CHAIN (placeholder_list);
9619 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9620 a test and can be longer if the test is eliminated. */
9622 /* Reduce to minus. */
9623 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9624 TREE_OPERAND (exp, 0),
9625 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9626 TREE_OPERAND (exp, 1))));
9627 /* Process as MINUS. */
9631 /* Non-zero iff operands of minus differ. */
9632 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9633 TREE_OPERAND (exp, 0),
9634 TREE_OPERAND (exp, 1)),
9635 NE, NE, if_false_label, if_true_label);
9639 /* If we are AND'ing with a small constant, do this comparison in the
9640 smallest type that fits. If the machine doesn't have comparisons
9641 that small, it will be converted back to the wider comparison.
9642 This helps if we are testing the sign bit of a narrower object.
9643 combine can't do this for us because it can't know whether a
9644 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
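/* E.g. for `x & 0x80' with a 32-bit X (illustrative): I is 7, so the
   test is done as a QImode comparison of (unsigned char) (x & 0x80)
   against zero, i.e. a sign-bit test of a single byte.  */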
9646 if (! SLOW_BYTE_ACCESS
9647 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9648 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9649 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9650 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9651 && (type = type_for_mode (mode, 1)) != 0
9652 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9653 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9654 != CODE_FOR_nothing))
9656 do_jump (convert (type, exp), if_false_label, if_true_label);
9661 case TRUTH_NOT_EXPR:
9662 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9665 case TRUTH_ANDIF_EXPR:
9666 if (if_false_label == 0)
9667 if_false_label = drop_through_label = gen_label_rtx ();
9668 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9669 start_cleanup_deferral ();
9670 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9671 end_cleanup_deferral ();
9674 case TRUTH_ORIF_EXPR:
9675 if (if_true_label == 0)
9676 if_true_label = drop_through_label = gen_label_rtx ();
9677 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9678 start_cleanup_deferral ();
9679 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9680 end_cleanup_deferral ();
9685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9686 preserve_temp_slots (NULL_RTX);
9690 do_pending_stack_adjust ();
9691 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9697 case ARRAY_RANGE_REF:
9699 HOST_WIDE_INT bitsize, bitpos;
9701 enum machine_mode mode;
9705 unsigned int alignment;
9707 /* Get description of this reference. We don't actually care
9708 about the underlying object here. */
9709 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9710 &unsignedp, &volatilep, &alignment);
9712 type = type_for_size (bitsize, unsignedp);
9713 if (! SLOW_BYTE_ACCESS
9714 && type != 0 && bitsize >= 0
9715 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9716 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9717 != CODE_FOR_nothing))
9719 do_jump (convert (type, exp), if_false_label, if_true_label);
9726 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9727 if (integer_onep (TREE_OPERAND (exp, 1))
9728 && integer_zerop (TREE_OPERAND (exp, 2)))
9729 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9731 else if (integer_zerop (TREE_OPERAND (exp, 1))
9732 && integer_onep (TREE_OPERAND (exp, 2)))
9733 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9737 rtx label1 = gen_label_rtx ();
9738 drop_through_label = gen_label_rtx ();
9740 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9742 start_cleanup_deferral ();
9743 /* Now the THEN-expression. */
9744 do_jump (TREE_OPERAND (exp, 1),
9745 if_false_label ? if_false_label : drop_through_label,
9746 if_true_label ? if_true_label : drop_through_label);
9747 /* In case the do_jump just above never jumps. */
9748 do_pending_stack_adjust ();
9749 emit_label (label1);
9751 /* Now the ELSE-expression. */
9752 do_jump (TREE_OPERAND (exp, 2),
9753 if_false_label ? if_false_label : drop_through_label,
9754 if_true_label ? if_true_label : drop_through_label);
9755 end_cleanup_deferral ();
9761 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9763 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9764 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9766 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9767 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
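/* Decompose the complex equality; in sketch form, `x == y' becomes
   `realpart (x) == realpart (y) && imagpart (x) == imagpart (y)'.  */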
9770 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9771 fold (build (EQ_EXPR, TREE_TYPE (exp),
9772 fold (build1 (REALPART_EXPR,
9773 TREE_TYPE (inner_type),
9775 fold (build1 (REALPART_EXPR,
9776 TREE_TYPE (inner_type),
9778 fold (build (EQ_EXPR, TREE_TYPE (exp),
9779 fold (build1 (IMAGPART_EXPR,
9780 TREE_TYPE (inner_type),
9782 fold (build1 (IMAGPART_EXPR,
9783 TREE_TYPE (inner_type),
9785 if_false_label, if_true_label);
9788 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9789 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9791 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9792 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9793 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9795 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9801 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9803 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9804 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9806 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9807 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9810 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9811 fold (build (NE_EXPR, TREE_TYPE (exp),
9812 fold (build1 (REALPART_EXPR,
9813 TREE_TYPE (inner_type),
9815 fold (build1 (REALPART_EXPR,
9816 TREE_TYPE (inner_type),
9818 fold (build (NE_EXPR, TREE_TYPE (exp),
9819 fold (build1 (IMAGPART_EXPR,
9820 TREE_TYPE (inner_type),
9822 fold (build1 (IMAGPART_EXPR,
9823 TREE_TYPE (inner_type),
9825 if_false_label, if_true_label);
9828 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9829 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9831 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9832 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9833 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9835 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9840 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9841 if (GET_MODE_CLASS (mode) == MODE_INT
9842 && ! can_compare_p (LT, mode, ccp_jump))
9843 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9845 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9849 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9850 if (GET_MODE_CLASS (mode) == MODE_INT
9851 && ! can_compare_p (LE, mode, ccp_jump))
9852 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9854 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9858 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9859 if (GET_MODE_CLASS (mode) == MODE_INT
9860 && ! can_compare_p (GT, mode, ccp_jump))
9861 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9863 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9867 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9868 if (GET_MODE_CLASS (mode) == MODE_INT
9869 && ! can_compare_p (GE, mode, ccp_jump))
9870 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9872 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9875 case UNORDERED_EXPR:
9878 enum rtx_code cmp, rcmp;
9881 if (code == UNORDERED_EXPR)
9882 cmp = UNORDERED, rcmp = ORDERED;
9884 cmp = ORDERED, rcmp = UNORDERED;
9885 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9888 if (! can_compare_p (cmp, mode, ccp_jump)
9889 && (can_compare_p (rcmp, mode, ccp_jump)
9890 /* If the target doesn't provide either UNORDERED or ORDERED
9891 comparisons, canonicalize on UNORDERED for the library. */
9892 || rcmp == UNORDERED))
9896 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9898 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9903 enum rtx_code rcode1;
9904 enum tree_code tcode2;
9928 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9929 if (can_compare_p (rcode1, mode, ccp_jump))
9930 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9934 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9935 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9938 /* If the target doesn't support combined unordered
9939 compares, decompose into UNORDERED + comparison. */
9940 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9941 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9942 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9943 do_jump (exp, if_false_label, if_true_label);
9949 __builtin_expect (<test>, 0) and
9950 __builtin_expect (<test>, 1)
9952 We need to do this here, so that <test> is not converted to a SCC
9953 operation on machines that use condition code registers and COMPARE
9954 like the PowerPC, and then the jump is done based on whether the SCC
9955 operation produced a 1 or 0. */
9957 /* Check for a built-in function. */
9958 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9960 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9961 tree arglist = TREE_OPERAND (exp, 1);
9963 if (TREE_CODE (fndecl) == FUNCTION_DECL
9964 && DECL_BUILT_IN (fndecl)
9965 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9966 && arglist != NULL_TREE
9967 && TREE_CHAIN (arglist) != NULL_TREE)
9969 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9972 if (seq != NULL_RTX)
9979 /* fall through and generate the normal code. */
9983 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9985 /* This is not needed any more and causes poor code since it causes
9986 comparisons and tests from non-SI objects to have different code sequences. */
9988 /* Copy to register to avoid generating bad insns by cse
9989 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9990 if (!cse_not_expected && GET_CODE (temp) == MEM)
9991 temp = copy_to_reg (temp);
9993 do_pending_stack_adjust ();
9994 /* Do any postincrements in the expression that was tested. */
9997 if (GET_CODE (temp) == CONST_INT
9998 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9999 || GET_CODE (temp) == LABEL_REF)
10001 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10003 emit_jump (target);
10005 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10006 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10007 /* Note swapping the labels gives us not-equal. */
10008 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10009 else if (GET_MODE (temp) != VOIDmode)
10010 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10011 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10012 GET_MODE (temp), NULL_RTX, 0,
10013 if_false_label, if_true_label);
10018 if (drop_through_label)
10020 /* If do_jump produces code that might be jumped around,
10021 do any stack adjusts from that code, before the place
10022 where control merges in. */
10023 do_pending_stack_adjust ();
10024 emit_label (drop_through_label);
10028 /* Given a comparison expression EXP for values too wide to be compared
10029 with one insn, test the comparison and jump to the appropriate label.
10030 The code of EXP is ignored; we always test GT if SWAP is 0,
10031 and LT if SWAP is 1. */
10034 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10037 rtx if_false_label, if_true_label;
10039 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10040 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10041 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10042 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10044 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10047 /* Compare OP0 with OP1, word at a time, in mode MODE.
10048 UNSIGNEDP says to do unsigned comparison.
10049 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10052 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10053 enum machine_mode mode;
10056 rtx if_false_label, if_true_label;
10058 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10059 rtx drop_through_label = 0;
10062 if (! if_true_label || ! if_false_label)
10063 drop_through_label = gen_label_rtx ();
10064 if (! if_true_label)
10065 if_true_label = drop_through_label;
10066 if (! if_false_label)
10067 if_false_label = drop_through_label;
10069 /* Compare a word at a time, high order first. */
10070 for (i = 0; i < nwords; i++)
10072 rtx op0_word, op1_word;
10074 if (WORDS_BIG_ENDIAN)
10076 op0_word = operand_subword_force (op0, i, mode);
10077 op1_word = operand_subword_force (op1, i, mode);
10081 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10082 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10085 /* All but the high-order word must be compared as unsigned. */
10086 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10087 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10088 NULL_RTX, if_true_label);
10090 /* Consider lower words only if these are equal. */
10091 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10092 NULL_RTX, 0, NULL_RTX, if_false_label);
10095 if (if_false_label)
10096 emit_jump (if_false_label);
10097 if (drop_through_label)
10098 emit_label (drop_through_label);
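/* For example (sketch), a signed two-word `a > b' comes out as:

       if (hi (a) > hi (b)) goto if_true_label;    (signed compare)
       if (hi (a) != hi (b)) goto if_false_label;
       if (lo (a) > lo (b)) goto if_true_label;    (unsigned compare)
       goto if_false_label;
   */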
10101 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10102 with one insn, test the comparison and jump to the appropriate label. */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */
void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
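/* Sketch of the fast path below: for a four-word OP0 it emits the
   equivalent of

       part = word0 | word1 | word2 | word3;
       if (part == 0) goto if_true_label; else goto if_false_label;

   one IOR chain plus a single word-mode comparison against zero.  */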
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
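/* As an illustrative note: the rtx returned below has the shape
   (CODE (cc0) (const_int 0)), e.g. (eq (cc0) (const_int 0)), which the
   caller can use as the condition of a conditional-branch pattern; when
   both operands are constant, the folded result (const0_rtx or
   const_true_rtx) is returned directly instead.  */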
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
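/* As an example, a request to jump to IF_FALSE_LABEL when `a < b' fails
   becomes, after the reversal above, a jump to that label when `a >= b'
   holds, saving a jump around a jump.  The reversal is skipped for
   floating-point modes because it is unsafe with NaNs: !(a < b) is not
   equivalent to a >= b when either operand is a NaN.  */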
  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
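/* As an example, for `a < b' the caller invokes

       do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);

   and the signedness of the operands' type, determined below, selects
   between the two codes.  */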
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
	 case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
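/* As an illustration, on a machine with a store-flag (scc) instruction,
   `x != 0' can become a single set-if-not-equal insn writing 0 or 1
   into TARGET, instead of a compare, a conditional jump, and two moves
   of constants.  */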
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
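/* As an example of the conversions below, signed `x < 1' becomes
   `x <= 0' (LE) and unsigned `x >= 1' becomes `x > 0' (GTU), so the
   zero-comparison shortcuts further down apply to them as well.  */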
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
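  /* As an example, `(x & 8) != 0' is computed as `(x >> 3) & 1' and
     `(x & 8) == 0' as `((x >> 3) & 1) ^ 1'; when the bit tested is the
     sign bit, the shift is done unsigned and the trailing AND is
     dropped entirely.  */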
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
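/* In other words: with a casesi pattern, which performs its own bounds
   check, a dispatch table pays off at 4 or more case values; without
   one the bounds check must be emitted separately, moving the
   break-even point to 5.  */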
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
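/* As an illustration, for `switch (i)' with cases 3..10 the code
   emitted below amounts to

       if ((unsigned) index > 7) goto default_label;
       pc = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   where INDEX already holds i - 3: a bounds check, an indexed load
   from the table, and an indirect jump.  */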
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
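  /* Worked example: for cases 3..10 the caller passes INDEX = i - 3 and
     RANGE = 7.  If i < 3 the subtraction wraps to a huge unsigned value,
     so the single unsigned test `INDEX > 7' rejects both i < 3 and
     i > 10 with one branch.  */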
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}