1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
65 #define STACK_PUSH_CODE PRE_INC
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since same_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
91 /* Don't check memory usage, since code is being emitted to check a memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
99 /* This structure is used by move_by_pieces to describe the move to
/* NOTE(review): this listing is truncated -- several struct members and the
   closing braces are missing.  The two structures below carry the working
   state for the by-pieces block-move and block-store loops declared later
   in this file.  Code kept byte-identical.  */
101 struct move_by_pieces
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
116 /* This structure is used by store_by_pieces to describe the clear to
119 struct store_by_pieces
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
/* constfun generates the constant rtx chunk to store at a given offset in
   a given mode; presumably supplied by the caller of store_by_pieces --
   TODO confirm against the (missing) function body.  */
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
132 extern struct obstack permanent_obstack;
134 static rtx get_push_address PARAMS ((int));
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static tree save_noncopied_parts PARAMS ((tree, tree));
167 static tree init_noncopied_parts PARAMS ((tree, tree));
168 static int fixed_type_p PARAMS ((tree));
169 static rtx var_rtx PARAMS ((tree));
170 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
171 static rtx expand_increment PARAMS ((tree, int, int));
172 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
173 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
174 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
182 /* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
186 static char direct_load[NUM_MACHINE_MODES];
187 static char direct_store[NUM_MACHINE_MODES];
189 /* If a memory-to-memory move would take MOVE_RATIO or more simple
190 move-instruction sequences, we will do a movstr or libcall instead. */
193 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 /* If we are optimizing for space (-Os), cut down the default move ratio. */
197 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 /* This macro is used to determine whether move_by_pieces should be called
202 to perform a structure copy. */
203 #ifndef MOVE_BY_PIECES_P
204 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
205 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
208 /* This array records the insn_code of insns to perform block moves. */
209 enum insn_code movstr_optab[NUM_MACHINE_MODES];
211 /* This array records the insn_code of insns to perform block clears. */
212 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
214 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
216 #ifndef SLOW_UNALIGNED_ACCESS
217 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
220 /* This is run once per compilation to set up which modes can be used
221 directly in memory and to initialize the block move optab. */
/* NOTE(review): truncated listing -- the function header (this appears to be
   the body of the once-per-compilation initializer described by the comment
   at original lines 220-221 above) and several interior lines are missing,
   including the declarations of mem, mem1, insn, pat, reg, regno and
   num_clobbers, and the loop braces.  The visible code probes each machine
   mode by building a (set ...) pattern and asking recog() whether a direct
   register<->memory move is supported, recording the answers in
   direct_load[] / direct_store[].  Code kept byte-identical.  */
227 enum machine_mode mode;
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
/* Build a throwaway SET insn whose operands are overwritten per mode below.  */
239 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
240 pat = PATTERN (insn);
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
248 direct_load[(int) mode] = direct_store[(int) mode] = 0;
249 PUT_MODE (mem, mode);
250 PUT_MODE (mem1, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
260 if (! HARD_REGNO_MODE_OK (regno, mode))
263 reg = gen_rtx_REG (mode, regno);
/* NOTE(review): the SET_SRC assignment for this first load test (from the
   stack-pointer-based MEM) is missing from this listing -- TODO confirm
   against the full source.  */
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
/* Second load test: from the frame-pointer-based MEM.  */
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
/* Store tests: register into each of the two MEMs.  */
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
290 /* This is run at the start of compiling a function. */
/* NOTE(review): the function header (presumably init_expr) is missing from
   this listing.  The visible body allocates the per-function expr_status
   record and zeroes the pending-stack-adjustment bookkeeping.  */
295 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
298 pending_stack_adjust = 0;
299 stack_pointer_delta = 0;
300 inhibit_defer_pop = 0;
302 apply_args_value = 0;
/* NOTE(review): truncated fragment -- header and guard lines missing.  This
   looks like the GC-marking walk over a struct expr_status (marks the rtx
   fields so the garbage collector keeps them live) -- TODO confirm name
   against the full source.  */
308 struct expr_status *p;
313 ggc_mark_rtx (p->x_saveregs_value);
314 ggc_mark_rtx (p->x_apply_args_value);
315 ggc_mark_rtx (p->x_forced_labels);
326 /* Small sanity check that the queue is empty at the end of a function. */
/* NOTE(review): return type line and body are missing from this listing;
   presumably the body aborts if pending_chain is non-null.  */
329 finish_expr_for_function ()
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
/* Queue BODY (an insn pattern that changes VAR) for later emission; see the
   comment block above.  Prepends a QUEUED rtx onto pending_chain and returns
   it, to be used in place of VAR where the pre-increment value is needed.
   NOTE(review): the return type and parameter declaration lines are missing
   from this listing.  */
346 enqueue_insn (var, body)
349 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
350 body, pending_chain);
351 return pending_chain;
354 /* Use protect_from_queue to convert a QUEUED expression
355 into something that you can put immediately into an instruction.
356 If the queued incrementation has not happened yet,
357 protect_from_queue returns the variable itself.
358 If the incrementation has happened, protect_from_queue returns a temp
359 that contains a copy of the old value of the variable.
361 Any time an rtx which might possibly be a QUEUED is to be put
362 into an instruction, it must be passed through protect_from_queue first.
363 QUEUED expressions are not meaningful in instructions.
365 Do not pass a value through protect_from_queue and then hold
366 on to it for a while before putting it in an instruction!
367 If the queue is flushed in between, incorrect code will result. */
/* Convert X, which may contain QUEUED rtxs, into something usable directly
   in an instruction (see the comment block above for the full contract).
   MODIFY is nonzero when the caller intends to write X.
   NOTE(review): this listing is truncated -- the return type, parameter
   declarations, several conditionals, braces and return statements are
   missing.  Code kept byte-identical.  */
370 protect_from_queue (x, modify)
374 register RTX_CODE code = GET_CODE (x);
376 #if 0 /* A QUEUED can hang around after the queue is forced out. */
377 /* Shortcut for most common case. */
378 if (pending_chain == 0)
384 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
385 use of autoincrement. Make a copy of the contents of the memory
386 location rather than a copy of the address, but not if the value is
387 of mode BLKmode. Don't modify X in place since it might be
389 if (code == MEM && GET_MODE (x) != BLKmode
390 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
/* NOTE(review): the declaration of `y` (presumably the QUEUED address,
   XEXP (x, 0)) is missing from this listing -- TODO confirm.  */
393 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
397 rtx temp = gen_reg_rtx (GET_MODE (x));
399 emit_insn_before (gen_move_insn (temp, new),
404 /* Copy the address into a pseudo, so that the returned value
405 remains correct across calls to emit_queue. */
406 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
409 /* Otherwise, recursively protect the subexpressions of all
410 the kinds of rtx's that can contain a QUEUED. */
413 rtx tem = protect_from_queue (XEXP (x, 0), 0);
414 if (tem != XEXP (x, 0))
420 else if (code == PLUS || code == MULT)
422 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
423 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
424 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
/* From here on, x is known to be a QUEUED rtx itself.  */
433 /* If the increment has not happened, use the variable itself. Copy it
434 into a new pseudo so that the value remains correct across calls to
436 if (QUEUED_INSN (x) == 0)
437 return copy_to_reg (QUEUED_VAR (x));
438 /* If the increment has happened and a pre-increment copy exists,
440 if (QUEUED_COPY (x) != 0)
441 return QUEUED_COPY (x);
442 /* The increment has happened but we haven't set up a pre-increment copy.
443 Set one up now, and use it. */
444 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
445 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
447 return QUEUED_COPY (x);
450 /* Return nonzero if X contains a QUEUED expression:
451 if it contains anything that will be altered by a queued increment.
452 We handle only combinations of MEM, PLUS, MINUS and MULT operators
453 since memory addresses generally contain only those. */
/* Body fragment of queued_subexp_p (see comment block above): returns
   nonzero if X contains a QUEUED rtx anywhere a queued increment could
   alter it.  NOTE(review): the header, switch/case framing and the QUEUED
   base case are missing from this listing.  The single-operand recursion
   presumably covers MEM; the two-operand one PLUS/MINUS/MULT -- TODO
   confirm.  */
459 register enum rtx_code code = GET_CODE (x);
465 return queued_subexp_p (XEXP (x, 0));
469 return (queued_subexp_p (XEXP (x, 0))
470 || queued_subexp_p (XEXP (x, 1)));
476 /* Perform all the pending incrementations. */
/* Body fragment of emit_queue (see "Perform all the pending incrementations"
   above): drains pending_chain, emitting each queued body and recording the
   first emitted insn in QUEUED_INSN so protect_from_queue can find it.
   NOTE(review): header, braces and the else keyword are missing from this
   listing.  Code kept byte-identical.  */
482 while ((p = pending_chain))
484 rtx body = QUEUED_BODY (p);
/* A SEQUENCE body: record its first element as the queued insn.  */
486 if (GET_CODE (body) == SEQUENCE)
488 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
489 emit_insn (QUEUED_BODY (p));
492 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
493 pending_chain = QUEUED_NEXT (p);
497 /* Copy data from FROM to TO, where the machine modes are not the same.
498 Both modes may be integer, or both may be floating.
499 UNSIGNEDP should be nonzero if FROM is an unsigned type.
500 This causes zero-extension instead of sign-extension. */
/* Emit insns to copy FROM into TO, converting between their machine modes
   (see the comment block above: both integer or both float; UNSIGNEDP
   selects zero- vs sign-extension).
   NOTE(review): this listing is heavily truncated -- the return type, many
   declarations (code, libcall, value, insns, fill_value, lowfrom, lowpart,
   shift_amount, tmp, i, ...), most braces, `else` keywords, `return`s and
   abort() calls are missing.  All code lines are kept byte-identical; only
   comments have been added.  */
503 convert_move (to, from, unsignedp)
504 register rtx to, from;
507 enum machine_mode to_mode = GET_MODE (to);
508 enum machine_mode from_mode = GET_MODE (from);
509 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
510 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
514 /* rtx code for making an equivalent value. */
515 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
517 to = protect_from_queue (to, 1);
518 from = protect_from_queue (from, 0);
520 if (to_real != from_real)
523 /* If FROM is a SUBREG that indicates that we have already done at least
524 the required extension, strip it. We don't handle such SUBREGs as
527 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
528 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
529 >= GET_MODE_SIZE (to_mode))
530 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
531 from = gen_lowpart (to_mode, from), from_mode = to_mode;
533 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Trivial case: same mode (or mode-less constant) -- plain move.  */
536 if (to_mode == from_mode
537 || (from_mode == VOIDmode && CONSTANT_P (from)))
539 emit_move_insn (to, from);
/* Vector modes: only bit-for-bit reinterpretation is handled here.  */
543 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
545 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
548 if (VECTOR_MODE_P (to_mode))
549 from = gen_rtx_SUBREG (to_mode, from, 0);
551 to = gen_rtx_SUBREG (from_mode, to, 0);
553 emit_move_insn (to, from);
/* Floating-point conversions: first try target-specific insns, then
   fall back to library calls.  */
557 if (to_real != from_real)
564 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
566 /* Try converting directly if the insn is supported. */
567 if ((code = can_extend_p (to_mode, from_mode, 0))
570 emit_unop_insn (code, to, from, UNKNOWN);
/* Per-mode-pair float truncation insns, each gated on the target
   providing the named pattern.  */
575 #ifdef HAVE_trunchfqf2
576 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
582 #ifdef HAVE_trunctqfqf2
583 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_truncsfqf2
590 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
596 #ifdef HAVE_truncdfqf2
597 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
603 #ifdef HAVE_truncxfqf2
604 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
610 #ifdef HAVE_trunctfqf2
611 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 #ifdef HAVE_trunctqfhf2
619 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_truncsfhf2
626 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncdfhf2
633 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncxfhf2
640 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
646 #ifdef HAVE_trunctfhf2
647 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_truncsftqf2
655 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdftqf2
662 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxftqf2
669 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctftqf2
676 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* Library-call fallback: pick a soft-float conversion routine by mode
   pair.  NOTE(review): the enclosing switch/case framing over from_mode
   and to_mode is missing from this listing.  */
726 libcall = extendsfdf2_libfunc;
730 libcall = extendsfxf2_libfunc;
734 libcall = extendsftf2_libfunc;
746 libcall = truncdfsf2_libfunc;
750 libcall = extenddfxf2_libfunc;
754 libcall = extenddftf2_libfunc;
766 libcall = truncxfsf2_libfunc;
770 libcall = truncxfdf2_libfunc;
782 libcall = trunctfsf2_libfunc;
786 libcall = trunctfdf2_libfunc;
798 if (libcall == (rtx) 0)
799 /* This conversion is not implemented yet. */
803 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
805 insns = get_insns ();
807 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to))
/* NOTE(review): the line above appears without its trailing semicolon
   context in this listing; original text kept byte-identical below.  */
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
852 /* No special multiword conversion insn; do it by hand. */
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
865 lowpart_mode = from_mode;
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
872 /* Compute the value to put in each remaining word. */
874 fill_value = const0_rtx;
/* Signed case: try the target's slt (set-on-less-than) insn to build
   the sign-fill word; otherwise shift the sign bit down.
   NOTE(review): the HAVE_slt guard line is missing from this listing.  */
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
911 insns = get_insns ();
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
946 #endif /* HAVE_truncqipqi2 */
950 if (from_mode == PQImode)
952 if (to_mode != QImode)
954 from = convert_to_mode (QImode, from, unsignedp);
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
965 #endif /* HAVE_extendpqiqi2 */
970 if (to_mode == PSImode)
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
981 #endif /* HAVE_truncsipsi2 */
985 if (from_mode == PSImode)
987 if (to_mode != SImode)
989 from = convert_to_mode (SImode, from, unsignedp);
994 #ifdef HAVE_extendpsisi2
995 if (! unsignedp && HAVE_extendpsisi2)
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1000 #endif /* HAVE_extendpsisi2 */
1001 #ifdef HAVE_zero_extendpsisi2
1002 if (unsignedp && HAVE_zero_extendpsisi2)
1004 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1007 #endif /* HAVE_zero_extendpsisi2 */
1012 if (to_mode == PDImode)
1014 if (from_mode != DImode)
1015 from = convert_to_mode (DImode, from, unsignedp);
1017 #ifdef HAVE_truncdipdi2
1018 if (HAVE_truncdipdi2)
1020 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1023 #endif /* HAVE_truncdipdi2 */
1027 if (from_mode == PDImode)
1029 if (to_mode != DImode)
1031 from = convert_to_mode (DImode, from, unsignedp);
1036 #ifdef HAVE_extendpdidi2
1037 if (HAVE_extendpdidi2)
1039 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1042 #endif /* HAVE_extendpdidi2 */
1047 /* Now follow all the conversions between integers
1048 no more than a word long. */
1050 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1051 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (from_mode)))
1055 if (!((GET_CODE (from) == MEM
1056 && ! MEM_VOLATILE_P (from)
1057 && direct_load[(int) to_mode]
1058 && ! mode_dependent_address_p (XEXP (from, 0)))
1059 || GET_CODE (from) == REG
1060 || GET_CODE (from) == SUBREG))
1061 from = force_reg (from_mode, from);
/* A hard register may not be valid in the narrower mode; copy to a
   pseudo first.  */
1062 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1063 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1064 from = copy_to_reg (from);
1065 emit_move_insn (to, gen_lowpart (to_mode, from));
1069 /* Handle extension. */
1070 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1072 /* Convert directly if that works. */
1073 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1074 != CODE_FOR_nothing)
1076 emit_unop_insn (code, to, from, equiv_code);
1081 enum machine_mode intermediate;
1085 /* Search for a mode to convert via. */
1086 for (intermediate = from_mode; intermediate != VOIDmode;
1087 intermediate = GET_MODE_WIDER_MODE (intermediate))
1088 if (((can_extend_p (to_mode, intermediate, unsignedp)
1089 != CODE_FOR_nothing)
1090 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1091 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1092 GET_MODE_BITSIZE (intermediate))))
1093 && (can_extend_p (intermediate, from_mode, unsignedp)
1094 != CODE_FOR_nothing))
1096 convert_move (to, convert_to_mode (intermediate, from,
1097 unsignedp), unsignedp);
1101 /* No suitable intermediate mode.
1102 Generate what we need with shifts. */
1103 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1104 - GET_MODE_BITSIZE (from_mode), 0);
1105 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1106 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1108 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1111 emit_move_insn (to, tmp);
1116 /* Support special truncate insns for certain modes. */
1118 if (from_mode == DImode && to_mode == SImode)
1120 #ifdef HAVE_truncdisi2
1121 if (HAVE_truncdisi2)
1123 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 if (from_mode == DImode && to_mode == HImode)
1133 #ifdef HAVE_truncdihi2
1134 if (HAVE_truncdihi2)
1136 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 if (from_mode == DImode && to_mode == QImode)
1146 #ifdef HAVE_truncdiqi2
1147 if (HAVE_truncdiqi2)
1149 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 if (from_mode == SImode && to_mode == HImode)
1159 #ifdef HAVE_truncsihi2
1160 if (HAVE_truncsihi2)
1162 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 if (from_mode == SImode && to_mode == QImode)
1172 #ifdef HAVE_truncsiqi2
1173 if (HAVE_truncsiqi2)
1175 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 if (from_mode == HImode && to_mode == QImode)
1185 #ifdef HAVE_trunchiqi2
1186 if (HAVE_trunchiqi2)
1188 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 if (from_mode == TImode && to_mode == DImode)
1198 #ifdef HAVE_trunctidi2
1199 if (HAVE_trunctidi2)
1201 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 if (from_mode == TImode && to_mode == SImode)
1211 #ifdef HAVE_trunctisi2
1212 if (HAVE_trunctisi2)
1214 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 if (from_mode == TImode && to_mode == HImode)
1224 #ifdef HAVE_trunctihi2
1225 if (HAVE_trunctihi2)
1227 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 if (from_mode == TImode && to_mode == QImode)
1237 #ifdef HAVE_trunctiqi2
1238 if (HAVE_trunctiqi2)
1240 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 /* Handle truncation of volatile memrefs, and so on;
1249 the things that couldn't be truncated directly,
1250 and for which there was no special instruction. */
1251 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1253 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1254 emit_move_insn (to, temp);
1258 /* Mode combination is not recognized. */
1262 /* Return an rtx for a value that would result
1263 from converting X to mode MODE.
1264 Both X and MODE may be floating, or both integer.
1265 UNSIGNEDP is nonzero if X is an unsigned value.
1266 This can be done by referring to a part of X in place
1267 or by copying to a new temporary with conversion.
1269 This function *must not* call protect_from_queue
1270 except when putting X into an insn (in which case convert_move does it). */
/* Return X converted to MODE (see comment block above); thin wrapper that
   delegates to convert_modes with OLDMODE = VOIDmode.
   NOTE(review): return type and remaining parameter declarations are missing
   from this listing.  */
1273 convert_to_mode (mode, x, unsignedp)
1274 enum machine_mode mode;
1278 return convert_modes (mode, VOIDmode, x, unsignedp);
1281 /* Return an rtx for a value that would result
1282 from converting X from mode OLDMODE to mode MODE.
1283 Both modes may be floating, or both integer.
1284 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1291 This function *must not* call protect_from_queue
1292 except when putting X into an insn (in which case convert_move does it). */
/* Return an rtx for X converted from OLDMODE to MODE (see comment block
   above).  Handles constants and lowpart references in place where
   possible; otherwise copies through a new pseudo via convert_move.
   NOTE(review): this listing is truncated -- the return type, the
   declarations of x/unsignedp/temp, several braces, `return x;`, and the
   final `return temp;` are missing.  Code kept byte-identical.  */
1295 convert_modes (mode, oldmode, x, unsignedp)
1296 enum machine_mode mode, oldmode;
1302 /* If FROM is a SUBREG that indicates that we have already done at least
1303 the required extension, strip it. */
1305 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1306 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1307 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1308 x = gen_lowpart (mode, x);
1310 if (GET_MODE (x) != VOIDmode)
1311 oldmode = GET_MODE (x);
1313 if (mode == oldmode)
1316 /* There is one case that we must handle specially: If we are converting
1317 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1318 we are to interpret the constant as unsigned, gen_lowpart will do
1319 the wrong if the constant appears negative. What we want to do is
1320 make the high-order word of the constant zero, not all ones. */
1322 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1323 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1324 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1326 HOST_WIDE_INT val = INTVAL (x);
1328 if (oldmode != VOIDmode
1329 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1331 int width = GET_MODE_BITSIZE (oldmode);
1333 /* We need to zero extend VAL. */
1334 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1337 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1340 /* We can do this with a gen_lowpart if both desired and current modes
1341 are integer, and this is either a constant integer, a register, or a
1342 non-volatile MEM. Except for the constant case where MODE is no
1343 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1345 if ((GET_CODE (x) == CONST_INT
1346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1347 || (GET_MODE_CLASS (mode) == MODE_INT
1348 && GET_MODE_CLASS (oldmode) == MODE_INT
1349 && (GET_CODE (x) == CONST_DOUBLE
1350 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1351 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1352 && direct_load[(int) mode])
1353 || (GET_CODE (x) == REG
1354 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1355 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1357 /* ?? If we don't know OLDMODE, we have to assume here that
1358 X does not need sign- or zero-extension. This may not be
1359 the case, but it's the best we can do. */
1360 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1361 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1363 HOST_WIDE_INT val = INTVAL (x);
1364 int width = GET_MODE_BITSIZE (oldmode);
1366 /* We must sign or zero-extend in this case. Start by
1367 zero-extending, then sign extend if we need to. */
1368 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* NOTE(review): the `if (! unsignedp ...` line that begins this sign-test
   condition is missing from this listing -- TODO confirm.  */
1370 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1371 val |= (HOST_WIDE_INT) (-1) << width;
1373 return GEN_INT (trunc_int_for_mode (val, mode));
1376 return gen_lowpart (mode, x);
/* Fallback: copy through a fresh pseudo with a real conversion.  */
1379 temp = gen_reg_rtx (mode);
1380 convert_move (temp, x, unsignedp);
1384 /* This macro is used to determine what the largest unit size that
1385 move_by_pieces can use is. */
1387 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1388 move efficiently, as opposed to MOVE_MAX which is the maximum
1389 number of bytes we can move with a single instruction. */
1391 #ifndef MOVE_MAX_PIECES
1392 #define MOVE_MAX_PIECES MOVE_MAX
1395 /* Generate several move instructions to copy LEN bytes from block FROM to
1396 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1397 and TO through protect_from_queue before calling.
1399 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1400 used to push FROM to the stack.
1402 ALIGN is maximum alignment we can assume. */
/* NOTE(review): this is a sampled excerpt -- the embedded line numbers skip,
   so the return type, some braces and several statements are not visible.
   Comments below describe only what is shown.  */
1405 move_by_pieces (to, from, len, align)
1407 unsigned HOST_WIDE_INT len;
/* Working state for the piecewise copy is collected in DATA and then
   handed to move_by_pieces_1 once per usable machine mode.  */
1410 struct move_by_pieces data;
1411 rtx to_addr, from_addr = XEXP (from, 0);
1412 unsigned int max_size = MOVE_MAX_PIECES + 1;
1413 enum machine_mode mode = VOIDmode, tmode;
1414 enum insn_code icode;
1417 data.from_addr = from_addr;
1420 to_addr = XEXP (to, 0);
/* Detect auto-increment/decrement addressing already present in the
   destination address (assignment targets are on the missing lines).  */
1423 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1424 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1426 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1433 #ifdef STACK_GROWS_DOWNWARD
1439 data.to_addr = to_addr;
1442 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1443 || GET_CODE (from_addr) == POST_INC
1444 || GET_CODE (from_addr) == POST_DEC);
1446 data.explicit_inc_from = 0;
1447 data.explicit_inc_to = 0;
/* When copying back-to-front, start the offset at the end of the block.  */
1448 if (data.reverse) data.offset = len;
1451 /* If copying requires more than two move insns,
1452 copy addresses to registers (to make displacements shorter)
1453 and use post-increment if available. */
1454 if (!(data.autinc_from && data.autinc_to)
1455 && move_by_pieces_ninsns (len, align) > 2)
1457 /* Find the mode of the largest move... */
1458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1460 if (GET_MODE_SIZE (tmode) < max_size)
1463 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1465 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1466 data.autinc_from = 1;
1467 data.explicit_inc_from = -1;
1469 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1471 data.from_addr = copy_addr_to_reg (from_addr);
1472 data.autinc_from = 1;
1473 data.explicit_inc_from = 1;
1475 if (!data.autinc_from && CONSTANT_P (from_addr))
1476 data.from_addr = copy_addr_to_reg (from_addr);
1477 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1479 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len))
1481 data.explicit_inc_to = -1;
1483 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1485 data.to_addr = copy_addr_to_reg (to_addr);
1487 data.explicit_inc_to = 1;
1489 if (!data.autinc_to && CONSTANT_P (to_addr))
1490 data.to_addr = copy_addr_to_reg (to_addr);
/* Clamp ALIGN: past MOVE_MAX bytes (or on targets with fast unaligned
   access) extra alignment buys nothing.  */
1493 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1494 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1495 align = MOVE_MAX * BITS_PER_UNIT;
1497 /* First move what we can in the largest integer mode, then go to
1498 successively smaller modes. */
1500 while (max_size > 1)
1502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1503 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1504 if (GET_MODE_SIZE (tmode) < max_size)
1507 if (mode == VOIDmode)
1510 icode = mov_optab->handlers[(int) mode].insn_code;
1511 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1512 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1514 max_size = GET_MODE_SIZE (mode);
1517 /* The code above should have handled everything. */
1522 /* Return number of insns required to move L bytes by pieces.
1523 ALIGN (in bits) is maximum alignment we can assume. */
/* NOTE(review): sampled excerpt; some lines (braces, the final return)
   are not visible here.  */
1525 static unsigned HOST_WIDE_INT
1526 move_by_pieces_ninsns (l, align)
1527 unsigned HOST_WIDE_INT l;
1530 unsigned HOST_WIDE_INT n_insns = 0;
1531 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
/* Same alignment clamp as move_by_pieces, so the two agree on which
   modes are usable.  */
1533 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1534 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1535 align = MOVE_MAX * BITS_PER_UNIT;
1537 while (max_size > 1)
1539 enum machine_mode mode = VOIDmode, tmode;
1540 enum insn_code icode;
/* Pick the widest integer mode strictly narrower than MAX_SIZE.  */
1542 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1543 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1544 if (GET_MODE_SIZE (tmode) < max_size)
1547 if (mode == VOIDmode)
1550 icode = mov_optab->handlers[(int) mode].insn_code;
1551 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
/* Count whole moves in this mode; the remainder falls to narrower modes.  */
1552 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1554 max_size = GET_MODE_SIZE (mode);
1562 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1563 with move instructions for mode MODE. GENFUN is the gen_... function
1564 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): sampled excerpt; surrounding braces and a few statements
   (e.g. the non-push branch guard) are not visible.  */
1567 move_by_pieces_1 (genfun, mode, data)
1568 rtx (*genfun) PARAMS ((rtx, ...));
1569 enum machine_mode mode;
1570 struct move_by_pieces *data;
1572 unsigned int size = GET_MODE_SIZE (mode);
1573 rtx to1 = NULL_RTX, from1;
/* Emit one MODE-sized move per iteration while at least SIZE bytes remain.  */
1575 while (data->len >= size)
1578 data->offset -= size;
1582 if (data->autinc_to)
/* Auto-increment destination: rebase the MEM on the running address reg.  */
1584 to1 = replace_equiv_address (data->to, data->to_addr);
1585 to1 = adjust_address (to1, mode, 0);
1588 to1 = adjust_address (data->to, mode, data->offset);
1591 if (data->autinc_from)
1593 from1 = replace_equiv_address (data->from, data->from_addr);
1594 from1 = adjust_address (from1, mode, 0);
1597 from1 = adjust_address (data->from, mode, data->offset);
/* Explicit pre-decrement bookkeeping when the target lacks the
   addressing mode but we chose a decrementing walk.  */
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1600 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1601 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1602 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1605 emit_insn ((*genfun) (to1, from1));
1608 #ifdef PUSH_ROUNDING
/* Push variant: TO is NULL and each piece is pushed onto the stack.  */
1609 emit_single_push_insn (mode, from1, NULL);
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1616 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1617 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1618 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1620 if (! data->reverse)
1621 data->offset += size;
1627 /* Emit code to move a block Y to a block X.
1628 This may be done with string-move instructions,
1629 with multiple scalar move instructions, or with a library call.
1631 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1633 SIZE is an rtx that says how long they are.
1634 ALIGN is the maximum alignment we can assume they have.
1636 Return the address of the new block, if memcpy is called and returns it,
/* NOTE(review): sampled excerpt -- the embedded line numbers skip, so the
   return type, several braces and the final return paths are not visible.
   Strategy order as shown: (1) move_by_pieces for small constant sizes,
   (2) a movstr insn from the machine description, (3) a memcpy CALL_EXPR
   (TARGET_MEM_FUNCTIONS) or a bcopy libcall.  */
1640 emit_block_move (x, y, size, align)
1646 #ifdef TARGET_MEM_FUNCTIONS
1648 tree call_expr, arg_list;
/* Both operands must be BLKmode MEMs (checks with aborts on the
   missing lines, presumably -- TODO confirm against full source).  */
1651 if (GET_MODE (x) != BLKmode)
1654 if (GET_MODE (y) != BLKmode)
1657 x = protect_from_queue (x, 1);
1658 y = protect_from_queue (y, 0);
1659 size = protect_from_queue (size, 0);
1661 if (GET_CODE (x) != MEM)
1663 if (GET_CODE (y) != MEM)
/* Strategy 1: small constant-size copies go piecewise.  */
1668 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1669 move_by_pieces (x, y, INTVAL (size), align);
1672 /* Try the most limited insn first, because there's no point
1673 including more than one in the machine description unless
1674 the more limited one has some advantage. */
1676 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1677 enum machine_mode mode;
1679 /* Since this is a move insn, we don't care about volatility. */
1682 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1683 mode = GET_MODE_WIDER_MODE (mode))
1685 enum insn_code code = movstr_optab[(int) mode];
1686 insn_operand_predicate_fn pred;
1688 if (code != CODE_FOR_nothing
1689 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1690 here because if SIZE is less than the mode mask, as it is
1691 returned by the macro, it will definitely be less than the
1692 actual mode mask. */
1693 && ((GET_CODE (size) == CONST_INT
1694 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1695 <= (GET_MODE_MASK (mode) >> 1)))
1696 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1697 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1698 || (*pred) (x, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1700 || (*pred) (y, BLKmode))
1701 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1702 || (*pred) (opalign, VOIDmode)))
1705 rtx last = get_last_insn ();
1708 op2 = convert_to_mode (mode, size, 1);
1709 pred = insn_data[(int) code].operand[2].predicate;
1710 if (pred != 0 && ! (*pred) (op2, mode))
1711 op2 = copy_to_mode_reg (mode, op2);
1713 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* On failure, discard any insns the failed expansion emitted.  */
1721 delete_insns_since (last);
1727 /* X, Y, or SIZE may have been passed through protect_from_queue.
1729 It is unsafe to save the value generated by protect_from_queue
1730 and reuse it later. Consider what happens if emit_queue is
1731 called before the return value from protect_from_queue is used.
1733 Expansion of the CALL_EXPR below will call emit_queue before
1734 we are finished emitting RTL for argument setup. So if we are
1735 not careful we could get the wrong value for an argument.
1737 To avoid this problem we go ahead and emit code to copy X, Y &
1738 SIZE into new pseudos. We can then place those new pseudos
1739 into an RTL_EXPR and use them later, even after a call to
1742 Note this is not strictly needed for library calls since they
1743 do not call emit_queue before loading their arguments. However,
1744 we may need to have library calls call emit_queue in the future
1745 since failing to do so could cause problems for targets which
1746 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1747 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1748 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1753 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1754 TREE_UNSIGNED (integer_type_node));
1755 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1758 #ifdef TARGET_MEM_FUNCTIONS
1759 /* It is incorrect to use the libcall calling conventions to call
1760 memcpy in this context.
1762 This could be a user call to memcpy and the user may wish to
1763 examine the return value from memcpy.
1765 For targets where libcalls and normal calls have different conventions
1766 for returning pointers, we could end up generating incorrect code.
1768 So instead of using a libcall sequence we build up a suitable
1769 CALL_EXPR and expand the call in the normal fashion. */
/* Lazily build a cached FUNCTION_DECL for memcpy; FN is presumably a
   static, registered as a GC root below.  */
1770 if (fn == NULL_TREE)
1774 /* This was copied from except.c, I don't know if all this is
1775 necessary in this context or not. */
1776 fn = get_identifier ("memcpy");
1777 fntype = build_pointer_type (void_type_node);
1778 fntype = build_function_type (fntype, NULL_TREE);
1779 fn = build_decl (FUNCTION_DECL, fn, fntype);
1780 ggc_add_tree_root (&fn, 1);
1781 DECL_EXTERNAL (fn) = 1;
1782 TREE_PUBLIC (fn) = 1;
1783 DECL_ARTIFICIAL (fn) = 1;
1784 TREE_NOTHROW (fn) = 1;
1785 make_decl_rtl (fn, NULL);
1786 assemble_external (fn);
1789 /* We need to make an argument list for the function call.
1791 memcpy has three arguments, the first two are void * addresses and
1792 the last is a size_t byte count for the copy. */
1794 = build_tree_list (NULL_TREE,
1795 make_tree (build_pointer_type (void_type_node), x))
1796 TREE_CHAIN (arg_list)
1797 = build_tree_list (NULL_TREE,
1798 make_tree (build_pointer_type (void_type_node), y));
1799 TREE_CHAIN (TREE_CHAIN (arg_list))
1800 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1801 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1803 /* Now we have to build up the CALL_EXPR itself. */
1804 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1805 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1806 call_expr, arg_list, NULL_TREE);
1807 TREE_SIDE_EFFECTS (call_expr) = 1;
1809 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* Non-TARGET_MEM_FUNCTIONS path: fall back to bcopy (note the swapped
   src/dst argument order relative to memcpy).  */
1811 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1812 VOIDmode, 3, y, Pmode, x, Pmode,
1813 convert_to_mode (TYPE_MODE (integer_type_node), size,
1814 TREE_UNSIGNED (integer_type_node)),
1815 TYPE_MODE (integer_type_node));
1822 /* Copy all or part of a value X into registers starting at REGNO.
1823 The number of registers to be filled is NREGS. */
/* NOTE(review): sampled excerpt; local declarations and some control
   flow around the load_multiple attempt are not visible.  */
1826 move_block_to_reg (regno, x, nregs, mode)
1830 enum machine_mode mode;
1833 #ifdef HAVE_load_multiple
/* Illegitimate constants must be forced to memory before being loaded.  */
1841 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1842 x = validize_mem (force_const_mem (mode, x));
1844 /* See if the machine can do this with a load multiple insn. */
1845 #ifdef HAVE_load_multiple
1846 if (HAVE_load_multiple)
1848 last = get_last_insn ();
1849 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
/* If the multi-load expansion failed, roll back the emitted insns.  */
1857 delete_insns_since (last);
/* Fallback: one word-sized move per register.  */
1861 for (i = 0; i < nregs; i++)
1862 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1863 operand_subword_force (x, i, mode));
1866 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1867 The number of registers to be filled is NREGS. SIZE indicates the number
1868 of bytes in the object X. */
/* NOTE(review): sampled excerpt; declarations and early returns are on
   lines not shown here.  */
1871 move_block_from_reg (regno, x, nregs, size)
1878 #ifdef HAVE_store_multiple
1882 enum machine_mode mode;
1887 /* If SIZE is that of a mode no bigger than a word, just use that
1888 mode's store operation. */
1889 if (size <= UNITS_PER_WORD
1890 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1892 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1896 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1897 to the left before storing to memory. Note that the previous test
1898 doesn't handle all cases (e.g. SIZE == 3). */
1899 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1901 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Left-justify the value within the word, then store the whole word.  */
1907 shift = expand_shift (LSHIFT_EXPR, word_mode,
1908 gen_rtx_REG (word_mode, regno),
1909 build_int_2 ((UNITS_PER_WORD - size)
1910 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1911 emit_move_insn (tem, shift);
1915 /* See if the machine can do this with a store multiple insn. */
1916 #ifdef HAVE_store_multiple
1917 if (HAVE_store_multiple)
1919 last = get_last_insn ();
1920 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1928 delete_insns_since (last);
/* Fallback: store each register to its word-sized subword of X.  */
1932 for (i = 0; i < nregs; i++)
1934 rtx tem = operand_subword (x, i, 1, BLKmode);
1939 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1943 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1944 registers represented by a PARALLEL. SSIZE represents the total size of
1945 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1947 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1948 the balance will be in what would be the low-order memory addresses, i.e.
1949 left justified for big endian, right justified for little endian. This
1950 happens to be true for the targets currently using this support. If this
1951 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
/* NOTE(review): sampled excerpt; the function header's return type, locals
   (i, start, tmps, src, shift) and several braces are on missing lines.  */
1955 emit_group_load (dst, orig_src, ssize, align)
1963 if (GET_CODE (dst) != PARALLEL)
1966 /* Check for a NULL entry, used to indicate that the parameter goes
1967 both on the stack and in registers. */
1968 if (XEXP (XVECEXP (dst, 0, 0), 0))
/* One temporary pseudo per destination piece; all pieces are extracted
   first, then copied to the hard regs in a second loop.  */
1973 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1975 /* Process the pieces. */
1976 for (i = start; i < XVECLEN (dst, 0); i++)
1978 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1980 unsigned int bytelen = GET_MODE_SIZE (mode);
1983 /* Handle trailing fragments that run over the size of the struct. */
1984 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1986 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1987 bytelen = ssize - bytepos;
1992 /* If we won't be loading directly from memory, protect the real source
1993 from strange tricks we might play; but make sure that the source can
1994 be loaded directly into the destination. */
1996 if (GET_CODE (orig_src) != MEM
1997 && (!CONSTANT_P (orig_src)
1998 || (GET_MODE (orig_src) != mode
1999 && GET_MODE (orig_src) != VOIDmode)))
2001 if (GET_MODE (orig_src) == VOIDmode)
2002 src = gen_reg_rtx (mode);
2004 src = gen_reg_rtx (GET_MODE (orig_src));
2005 emit_move_insn (src, orig_src);
2008 /* Optimize the access just a bit. */
2009 if (GET_CODE (src) == MEM
2010 && align >= GET_MODE_ALIGNMENT (mode)
2011 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2012 && bytelen == GET_MODE_SIZE (mode))
2014 tmps[i] = gen_reg_rtx (mode);
2015 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2017 else if (GET_CODE (src) == CONCAT)
/* CONCAT source: pick whichever half the piece maps onto exactly.  */
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2021 tmps[i] = XEXP (src, 0);
2022 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2023 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2024 tmps[i] = XEXP (src, 1);
2028 else if (CONSTANT_P (src)
2029 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
/* General case: pull the piece out with a bit-field extraction.  */
2032 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2033 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2034 mode, mode, align, ssize);
2036 if (BYTES_BIG_ENDIAN && shift)
2037 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
2043 /* Copy the extracted pieces into the proper (probable) hard regs. */
2044 for (i = start; i < XVECLEN (dst, 0); i++)
2045 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2048 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2049 registers represented by a PARALLEL. SSIZE represents the total size of
2050 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
/* NOTE(review): sampled excerpt; return type, locals (i, start, tmps, dst)
   and several braces are on missing lines.  */
2053 emit_group_store (orig_dst, src, ssize, align)
2061 if (GET_CODE (src) != PARALLEL)
2064 /* Check for a NULL entry, used to indicate that the parameter goes
2065 both on the stack and in registers. */
2066 if (XEXP (XVECEXP (src, 0, 0), 0))
2071 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2073 /* Copy the (probable) hard regs into pseudos. */
2074 for (i = start; i < XVECLEN (src, 0); i++)
2076 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2077 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2078 emit_move_insn (tmps[i], reg);
2082 /* If we won't be storing directly into memory, protect the real destination
2083 from strange tricks we might play. */
2085 if (GET_CODE (dst) == PARALLEL)
2089 /* We can get a PARALLEL dst if there is a conditional expression in
2090 a return statement. In that case, the dst and src are the same,
2091 so no action is necessary. */
2092 if (rtx_equal_p (dst, src))
2095 /* It is unclear if we can ever reach here, but we may as well handle
2096 it. Allocate a temporary, and split this into a store/load to/from
/* PARALLEL-to-PARALLEL case is bounced through a stack temporary by
   recursing into store then load.  */
2099 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2100 emit_group_store (temp, src, ssize, align);
2101 emit_group_load (dst, temp, ssize, align);
2104 else if (GET_CODE (dst) != MEM)
2106 dst = gen_reg_rtx (GET_MODE (orig_dst));
2107 /* Make life a bit easier for combine. */
2108 emit_move_insn (dst, const0_rtx);
2111 /* Process the pieces. */
2112 for (i = start; i < XVECLEN (src, 0); i++)
2114 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2115 enum machine_mode mode = GET_MODE (tmps[i]);
2116 unsigned int bytelen = GET_MODE_SIZE (mode);
2118 /* Handle trailing fragments that run over the size of the struct. */
2119 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2121 if (BYTES_BIG_ENDIAN)
/* Big-endian trailing fragment lives in the high bits; shift it down.  */
2123 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2124 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2125 tmps[i], 0, OPTAB_WIDEN);
2127 bytelen = ssize - bytepos;
2130 /* Optimize the access just a bit. */
2131 if (GET_CODE (dst) == MEM
2132 && align >= GET_MODE_ALIGNMENT (mode)
2133 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2134 && bytelen == GET_MODE_SIZE (mode))
2135 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2137 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2138 mode, tmps[i], align, ssize);
2143 /* Copy from the pseudo into the (probable) hard reg. */
2144 if (GET_CODE (dst) == REG)
2145 emit_move_insn (orig_dst, dst);
2148 /* Generate code to copy a BLKmode object of TYPE out of a
2149 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2150 is null, a stack temporary is created. TGTBLK is returned.
2152 The primary purpose of this routine is to handle functions
2153 that return BLKmode structures in registers. Some machines
2154 (the PA for example) want to return all small structures
2155 in registers regardless of the structure's alignment. */
/* NOTE(review): sampled excerpt; the function header's return type, the
   TGTBLK == 0 guard and the final return are on missing lines.  */
2158 copy_blkmode_from_reg (tgtblk, srcreg, type)
2163 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2164 rtx src = NULL, dst = NULL;
/* Copy granularity: the type's alignment, capped at one word.  */
2165 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2166 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2170 tgtblk = assign_temp (build_qualified_type (type,
2172 | TYPE_QUAL_CONST)),
2174 preserve_temp_slots (tgtblk);
2177 /* This code assumes srcreg is at least a full word. If it isn't,
2178 copy it into a new pseudo which is a full word. */
2179 if (GET_MODE (srcreg) != BLKmode
2180 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2181 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2183 /* Structures whose size is not a multiple of a word are aligned
2184 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2185 machine, this means we must skip the empty high order bytes when
2186 calculating the bit offset. */
2187 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2188 big_endian_correction
2189 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2191 /* Copy the structure BITSIZE bites at a time.
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
/* BITPOS tracks the destination offset, XBITPOS the (possibly
   endian-corrected) source offset; they advance in lockstep.  */
2196 for (bitpos = 0, xbitpos = big_endian_correction;
2197 bitpos < bytes * BITS_PER_UNIT;
2198 bitpos += bitsize, xbitpos += bitsize)
2200 /* We need a new source operand each time xbitpos is on a
2201 word boundary and when xbitpos == big_endian_correction
2202 (the first time through). */
2203 if (xbitpos % BITS_PER_WORD == 0
2204 || xbitpos == big_endian_correction)
2205 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2208 /* We need a new destination operand each time bitpos is on
2210 if (bitpos % BITS_PER_WORD == 0)
2211 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2213 /* Use xbitpos for the source extraction (right justified) and
2214 xbitpos for the destination store (left justified). */
2215 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2216 extract_bit_field (src, bitsize,
2217 xbitpos % BITS_PER_WORD, 1,
2218 NULL_RTX, word_mode, word_mode,
2219 bitsize, BITS_PER_WORD),
2220 bitsize, BITS_PER_WORD);
2226 /* Add a USE expression for REG to the (possibly empty) list pointed
2227 to by CALL_FUSAGE. REG must denote a hard register. */
/* NOTE(review): sampled excerpt; the abort on a non-hard-reg argument
   and the assignment target for the list prepend are on missing lines.  */
2230 use_reg (call_fusage, reg)
2231 rtx *call_fusage, reg;
2233 if (GET_CODE (reg) != REG
2234 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend (use REG) onto the CALL_FUSAGE expr-list.  */
2238 = gen_rtx_EXPR_LIST (VOIDmode,
2239 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2242 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2243 starting at REGNO. All of these registers must be hard registers. */
/* NOTE(review): sampled excerpt; parameter declarations and the abort
   branch body are on missing lines.  */
2246 use_regs (call_fusage, regno, nregs)
2253 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2256 for (i = 0; i < nregs; i++)
/* Each register is used in its raw (hard-register) mode.  */
2257 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2260 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2261 PARALLEL REGS. This is for calls that pass values in multiple
2262 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* NOTE(review): sampled excerpt; parameter declarations are on missing
   lines.  */
2265 use_group_regs (call_fusage, regs)
2271 for (i = 0; i < XVECLEN (regs, 0); i++)
2273 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2275 /* A NULL entry means the parameter goes both on the stack and in
2276 registers. This can also be a MEM for targets that pass values
2277 partially on the stack and partially in registers. */
2278 if (reg != 0 && GET_CODE (reg) == REG)
2279 use_reg (call_fusage, reg);
/* Determine whether LEN bytes whose values are produced by CONSTFUN
   (called with CONSTFUNDATA and a byte offset) can be stored by pieces:
   every constant CONSTFUN yields must be LEGITIMATE_CONSTANT_P for the
   mode it would be stored in.  NOTE(review): sampled excerpt -- the
   return type, return statements and loop bodies are on missing lines.  */
2285 can_store_by_pieces (len, constfun, constfundata, align)
2286 unsigned HOST_WIDE_INT len;
2287 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2291 unsigned HOST_WIDE_INT max_size, l;
2292 HOST_WIDE_INT offset = 0;
2293 enum machine_mode mode, tmode;
2294 enum insn_code icode;
2298 if (! MOVE_BY_PIECES_P (len, align))
2301 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2302 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2303 align = MOVE_MAX * BITS_PER_UNIT;
2305 /* We would first store what we can in the largest integer mode, then go to
2306 successively smaller modes. */
/* Both forward and reverse walks are simulated, mirroring what
   store_by_pieces_1 may actually do.  */
2309 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2314 max_size = MOVE_MAX_PIECES + 1;
2315 while (max_size > 1)
2317 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2319 if (GET_MODE_SIZE (tmode) < max_size)
2322 if (mode == VOIDmode)
2325 icode = mov_optab->handlers[(int) mode].insn_code;
2326 if (icode != CODE_FOR_nothing
2327 && align >= GET_MODE_ALIGNMENT (mode))
2329 unsigned int size = GET_MODE_SIZE (mode);
/* Ask CONSTFUN for the piece at this offset and verify it is a
   legitimate constant for MODE.  */
2336 cst = (*constfun) (constfundata, offset, mode);
2337 if (!LEGITIMATE_CONSTANT_P (cst))
2347 max_size = GET_MODE_SIZE (mode);
2350 /* The code above should have handled everything. */
2358 /* Generate several move instructions to store LEN bytes generated by
2359 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2360 pointer which will be passed as argument in every CONSTFUN call.
2361 ALIGN is maximum alignment we can assume. */
/* NOTE(review): sampled excerpt; the abort branch of the
   MOVE_BY_PIECES_P check and the data.to / data.len assignments are on
   missing lines.  */
2364 store_by_pieces (to, len, constfun, constfundata, align)
2366 unsigned HOST_WIDE_INT len;
2367 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2371 struct store_by_pieces data;
2373 if (! MOVE_BY_PIECES_P (len, align))
2375 to = protect_from_queue (to, 1);
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
/* All the real work happens in the shared worker.  */
2380 store_by_pieces_1 (&data, align);
2383 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). The caller must pass TO through protect_from_queue
2385 before calling. ALIGN is maximum alignment we can assume. */
/* NOTE(review): sampled excerpt; the data.to / data.len assignments are
   on missing lines.  Clearing is just store_by_pieces with a constant
   generator that always yields zero.  */
2388 clear_by_pieces (to, len, align)
2390 unsigned HOST_WIDE_INT len;
2393 struct store_by_pieces data;
2395 data.constfun = clear_by_pieces_1;
2396 data.constfundata = NULL;
2399 store_by_pieces_1 (&data, align);
2402 /* Callback routine for clear_by_pieces.
2403 Return const0_rtx unconditionally. */
/* NOTE(review): sampled excerpt; the return type line and the body's
   `return const0_rtx;` are on missing lines.  All parameters are
   deliberately unused.  */
2406 clear_by_pieces_1 (data, offset, mode)
2407 PTR data ATTRIBUTE_UNUSED;
2408 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2409 enum machine_mode mode ATTRIBUTE_UNUSED;
2414 /* Subroutine of clear_by_pieces and store_by_pieces.
2415 Generate several move instructions to store LEN bytes of block TO. (A MEM
2416 rtx with BLKmode). The caller must pass TO through protect_from_queue
2417 before calling. ALIGN is maximum alignment we can assume. */
/* NOTE(review): sampled excerpt; return type, some braces and a few
   statements are on missing lines.  Structure mirrors move_by_pieces but
   for the destination side only.  */
2420 store_by_pieces_1 (data, align)
2421 struct store_by_pieces *data;
2424 rtx to_addr = XEXP (data->to, 0);
2425 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2426 enum machine_mode mode = VOIDmode, tmode;
2427 enum insn_code icode;
2430 data->to_addr = to_addr;
/* Detect pre-existing auto-inc/dec addressing in the destination.  */
2432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2435 data->explicit_inc_to = 0;
2437 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2439 data->offset = data->len;
2441 /* If storing requires more than two move insns,
2442 copy addresses to registers (to make displacements shorter)
2443 and use post-increment if available. */
2444 if (!data->autinc_to
2445 && move_by_pieces_ninsns (data->len, align) > 2)
2447 /* Determine the main mode we'll be using. */
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) < max_size)
2453 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2455 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = -1;
2460 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2461 && ! data->autinc_to)
2463 data->to_addr = copy_addr_to_reg (to_addr);
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = 1;
2468 if ( !data->autinc_to && CONSTANT_P (to_addr))
2469 data->to_addr = copy_addr_to_reg (to_addr);
/* Clamp ALIGN as in move_by_pieces.  */
2472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2474 align = MOVE_MAX * BITS_PER_UNIT;
2476 /* First store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2479 while (max_size > 1)
2481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2483 if (GET_MODE_SIZE (tmode) < max_size)
2486 if (mode == VOIDmode)
2489 icode = mov_optab->handlers[(int) mode].insn_code;
2490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2491 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2493 max_size = GET_MODE_SIZE (mode);
2496 /* The code above should have handled everything. */
2501 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2502 with move instructions for mode MODE. GENFUN is the gen_... function
2503 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): sampled excerpt; return type, the declarations of to1/cst
   and some braces are on missing lines.  */
2506 store_by_pieces_2 (genfun, mode, data)
2507 rtx (*genfun) PARAMS ((rtx, ...));
2508 enum machine_mode mode;
2509 struct store_by_pieces *data;
2511 unsigned int size = GET_MODE_SIZE (mode);
/* Emit one MODE-sized store per iteration while at least SIZE bytes remain.  */
2514 while (data->len >= size)
2517 data->offset -= size;
2519 if (data->autinc_to)
2521 to1 = replace_equiv_address (data->to, data->to_addr);
2522 to1 = adjust_address (to1, mode, 0);
2525 to1 = adjust_address (data->to, mode, data->offset);
2527 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2528 emit_insn (gen_add2_insn (data->to_addr,
2529 GEN_INT (-(HOST_WIDE_INT) size)));
/* Fetch this piece's constant from the caller-supplied generator.  */
2531 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2532 emit_insn ((*genfun) (to1, cst));
2534 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2537 if (! data->reverse)
2538 data->offset += size;
2544 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2545 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2547 If we call a function that returns the length of the block, return it. */
/* Strategy, in order of preference: single zero move for a non-BLKmode
   OBJECT whose size matches SIZE; clear_by_pieces for small constant
   sizes; the target's clrstr insn pattern if one applies; otherwise an
   out-of-line call to memset (or bzero).  */
2550 clear_storage (object, size, align)
2555 #ifdef TARGET_MEM_FUNCTIONS
2557 tree call_expr, arg_list;
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
/* Small constant sizes: clear inline with a sequence of stores.  */
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
/* Walk the integer modes narrowest-first looking for a clrstr
   pattern whose operand predicates accept our operands.  */
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
/* Remember where we are so a failed expansion can be undone.  */
2605 rtx last = get_last_insn ();
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* Pattern expansion failed; discard the partial insns and try
   the next wider mode.  */
2620 delete_insns_since (last);
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
/* FN caches the synthesized memset FUNCTION_DECL across calls;
   build it lazily on first use.  */
2667 if (fn == NULL_TREE)
2671 /* This was copied from except.c, I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
/* Register FN with the garbage collector so it survives.  */
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2686 /* We need to make an argument list for the function call.
2688 memset has three arguments, the first is a void * address, the
2689 second an integer with the initialization value, the last is a
2690 size_t byte count for the copy. */
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* No target memory functions: fall back to a bzero libcall.  */
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2721 /* Generate code to copy Y into X.
2722 Both Y and X must have the same mode, except that
2723 Y can be a constant with VOIDmode.
2724 This mode cannot be BLKmode; use emit_block_move for that.
2726 Return the last instruction emitted. */
2729 emit_move_insn (x, y)
2732 enum machine_mode mode = GET_MODE (x);
2733 rtx y_cst = NULL_RTX;
2736 x = protect_from_queue (x, 1);
2737 y = protect_from_queue (y, 0);
/* Sanity check: modes must agree (VOIDmode constants excepted) and
   must not be BLKmode.  */
2739 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2742 /* Never force constant_p_rtx to memory. */
2743 if (GET_CODE (y) == CONSTANT_P_RTX)
/* A constant the target cannot accept as an immediate operand is
   spilled to the constant pool; Y_CST remembers the original value
   so a REG_EQUAL note can be attached below.  */
2745 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2748 y = force_const_mem (mode, y);
2751 /* If X or Y are memory references, verify that their addresses are valid
2753 if (GET_CODE (x) == MEM
2754 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2755 && ! push_operand (x, GET_MODE (x)))
2757 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2758 x = validize_mem (x);
2760 if (GET_CODE (y) == MEM
2761 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2763 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2764 y = validize_mem (y);
2766 if (mode == BLKmode)
/* The real work happens in emit_move_insn_1.  */
2769 last_insn = emit_move_insn_1 (x, y);
/* Record the original constant on the final insn so later passes
   know the register's value.  */
2771 if (y_cst && GET_CODE (x) == REG)
2772 REG_NOTES (last_insn)
2773 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2778 /* Low level part of emit_move_insn.
2779 Called just like emit_move_insn, but assumes X and Y
2780 are basically valid. */
/* Three strategies, tried in order: the target's mov pattern for
   MODE; for complex modes without one, split into real/imaginary
   part moves in SUBMODE; for any other multi-word mode, move one
   word at a time with operand_subword.  */
2783 emit_move_insn_1 (x, y)
2786 enum machine_mode mode = GET_MODE (x);
2787 enum machine_mode submode;
2788 enum mode_class class = GET_MODE_CLASS (mode);
2791 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
/* Case 1: the machine description has a move pattern for MODE.  */
2794 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2798 /* Expand complex moves by moving real part and imag part, if possible. */
2799 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2800 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2802 (class == MODE_COMPLEX_INT
2803 ? MODE_INT : MODE_FLOAT),
2805 && (mov_optab->handlers[(int) submode].insn_code
2806 != CODE_FOR_nothing))
2808 /* Don't split destination if it is a stack push. */
2809 int stack = push_operand (x, GET_MODE (x));
2811 #ifdef PUSH_ROUNDING
2812 /* In case we output to the stack, but the size is smaller than what
2813 the machine can push exactly, we need to use move instructions. */
2815 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2818 int offset1, offset2;
2820 /* Do not use anti_adjust_stack, since we don't want to update
2821 stack_pointer_delta. */
2822 temp = expand_binop (Pmode,
2823 #ifdef STACK_GROWS_DOWNWARD
2830 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2834 if (temp != stack_pointer_rtx)
2835 emit_move_insn (stack_pointer_rtx, temp);
/* Compute where, relative to the adjusted stack pointer, each half
   of the complex value lands.  */
2836 #ifdef STACK_GROWS_DOWNWARD
2838 offset2 = GET_MODE_SIZE (submode);
2840 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2841 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2842 + GET_MODE_SIZE (submode));
2844 emit_move_insn (change_address (x, submode,
2845 gen_rtx_PLUS (Pmode,
2847 GEN_INT (offset1))),
2848 gen_realpart (submode, y));
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2852 GEN_INT (offset2))),
2853 gen_imagpart (submode, y));
2857 /* If this is a stack, push the highpart first, so it
2858 will be in the argument order.
2860 In that case, change_address is used only to convert
2861 the mode, not to change the address. */
2864 /* Note that the real part always precedes the imag part in memory
2865 regardless of machine's endianness. */
2866 #ifdef STACK_GROWS_DOWNWARD
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2871 (gen_rtx_MEM (submode, XEXP (x, 0)),
2872 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
2879 gen_imagpart (submode, y)));
/* Non-push case: move the two halves separately.  */
2884 rtx realpart_x, realpart_y;
2885 rtx imagpart_x, imagpart_y;
2887 /* If this is a complex value with each part being smaller than a
2888 word, the usual calling sequence will likely pack the pieces into
2889 a single register. Unfortunately, SUBREG of hard registers only
2890 deals in terms of words, so we have a problem converting input
2891 arguments to the CONCAT of two registers that is used elsewhere
2892 for complex values. If this is before reload, we can copy it into
2893 memory and reload. FIXME, we should see about using extract and
2894 insert on integer registers, but complex short and complex char
2895 variables should be rarely used. */
2896 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2897 && (reload_in_progress | reload_completed) == 0)
2899 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2900 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2902 if (packed_dest_p || packed_src_p)
2904 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2905 ? MODE_FLOAT : MODE_INT);
2907 enum machine_mode reg_mode
2908 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2910 if (reg_mode != BLKmode)
/* Bounce the value through a stack temporary: MEM viewed both as
   the packed scalar mode and as the complex mode.  */
2912 rtx mem = assign_stack_temp (reg_mode,
2913 GET_MODE_SIZE (mode), 0);
2914 rtx cmem = adjust_address (mem, mode, 0);
2917 = N_("function using short complex types cannot be inline");
2921 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2922 emit_move_insn_1 (cmem, y);
2923 return emit_move_insn_1 (sreg, mem);
2927 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2928 emit_move_insn_1 (mem, sreg);
2929 return emit_move_insn_1 (x, cmem);
2935 realpart_x = gen_realpart (submode, x);
2936 realpart_y = gen_realpart (submode, y);
2937 imagpart_x = gen_imagpart (submode, x);
2938 imagpart_y = gen_imagpart (submode, y);
2940 /* Show the output dies here. This is necessary for SUBREGs
2941 of pseudos since we cannot track their lifetimes correctly;
2942 hard regs shouldn't appear here except as return values.
2943 We never want to emit such a clobber after reload. */
2945 && ! (reload_in_progress || reload_completed)
2946 && (GET_CODE (realpart_x) == SUBREG
2947 || GET_CODE (imagpart_x) == SUBREG))
2949 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (realpart_x, realpart_y));
2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2955 (imagpart_x, imagpart_y));
2958 return get_last_insn ();
2961 /* This will handle any multi-word mode that lacks a move_insn pattern.
2962 However, you will get better code if you define such patterns,
2963 even if they must turn into multiple assembler instructions. */
2964 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2970 #ifdef PUSH_ROUNDING
2972 /* If X is a push on the stack, do the push now and replace
2973 X with a reference to the stack pointer. */
2974 if (push_operand (x, GET_MODE (x)))
2979 /* Do not use anti_adjust_stack, since we don't want to update
2980 stack_pointer_delta. */
2981 temp = expand_binop (Pmode,
2982 #ifdef STACK_GROWS_DOWNWARD
2989 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2993 if (temp != stack_pointer_rtx)
2994 emit_move_insn (stack_pointer_rtx, temp);
/* Rewrite post-inc/post-dec push addresses as a plain SP offset,
   since the adjustment was already made above.  */
2996 code = GET_CODE (XEXP (x, 0));
2997 /* Just hope that small offsets off SP are OK. */
2998 if (code == POST_INC)
2999 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3000 GEN_INT (-(HOST_WIDE_INT)
3001 GET_MODE_SIZE (GET_MODE (x))));
3002 else if (code == POST_DEC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3006 temp = stack_pointer_rtx;
3008 x = change_address (x, VOIDmode, temp);
3012 /* If we are in reload, see if either operand is a MEM whose address
3013 is scheduled for replacement. */
3014 if (reload_in_progress && GET_CODE (x) == MEM
3015 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3016 x = replace_equiv_address_nv (x, inner)
3017 if (reload_in_progress && GET_CODE (y) == MEM
3018 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3019 y = replace_equiv_address_nv (y, inner);
/* Move one word at a time, rounding the byte size up to whole
   words.  */
3025 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3028 rtx xpart = operand_subword (x, i, 1, mode);
3029 rtx ypart = operand_subword (y, i, 1, mode);
3031 /* If we can't get a part of Y, put Y into memory if it is a
3032 constant. Otherwise, force it into a register. If we still
3033 can't get a part of Y, abort. */
3034 if (ypart == 0 && CONSTANT_P (y))
3036 y = force_const_mem (mode, y);
3037 ypart = operand_subword (y, i, 1, mode);
3039 else if (ypart == 0)
3040 ypart = operand_subword_force (y, i, mode);
3042 if (xpart == 0 || ypart == 0)
3045 need_clobber |= (GET_CODE (xpart) == SUBREG);
3047 last_insn = emit_move_insn (xpart, ypart);
3050 seq = gen_sequence ();
3053 /* Show the output dies here. This is necessary for SUBREGs
3054 of pseudos since we cannot track their lifetimes correctly;
3055 hard regs shouldn't appear here except as return values.
3056 We never want to emit such a clobber after reload. */
3058 && ! (reload_in_progress || reload_completed)
3059 && need_clobber != 0)
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3072 /* Pushing data onto the stack. */
3074 /* Push a block of length SIZE (perhaps variable)
3075 and return an rtx to address the beginning of the block.
3076 Note that it is not possible for the value returned to be a QUEUED.
3077 The value may be virtual_outgoing_args_rtx.
3079 EXTRA is the number of bytes of padding to push in addition to SIZE.
3080 BELOW nonzero means this padding comes at low addresses;
3081 otherwise, the padding comes at high addresses. */
3084 push_block (size, extra, below)
/* SIZE arrives in ptr_mode; widen/narrow to Pmode for the stack
   arithmetic below.  */
3090 size = convert_modes (Pmode, ptr_mode, size, 1);
/* Grow the stack by SIZE+EXTRA.  Constant sizes fold the padding in;
   a plain REG with no padding can be used directly; otherwise compute
   the total into a temporary register first.  */
3091 if (CONSTANT_P (size))
3092 anti_adjust_stack (plus_constant (size, extra));
3093 else if (GET_CODE (size) == REG && extra == 0)
3094 anti_adjust_stack (size);
3097 temp = copy_to_mode_reg (Pmode, size);
3099 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3100 temp, 0, OPTAB_LIB_WIDEN);
3101 anti_adjust_stack (temp);
/* Now compute the address of the block's start, which depends on the
   direction of stack growth.  */
3104 #ifndef STACK_GROWS_DOWNWARD
3110 temp = virtual_outgoing_args_rtx;
3111 if (extra != 0 && below)
3112 temp = plus_constant (temp, extra);
3116 if (GET_CODE (size) == CONST_INT)
3117 temp = plus_constant (virtual_outgoing_args_rtx,
3118 -INTVAL (size) - (below ? 0 : extra));
3119 else if (extra != 0 && !below)
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, plus_constant (size, extra)));
3123 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3124 negate_rtx (Pmode, size));
/* Legitimize the address before handing it back.  */
3127 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3131 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3132 block of SIZE bytes. */
3135 get_push_address (size)
/* For post-modify push codes the stack pointer has not yet moved past
   the data, so offset it by SIZE; for pre-modify codes it already
   points at the block.  */
3140 if (STACK_PUSH_CODE == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 else if (STACK_PUSH_CODE == POST_INC)
3143 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3145 temp = stack_pointer_rtx;
/* Snapshot the address into a register so later stack adjustments
   don't invalidate it.  */
3147 return copy_to_reg (temp);
3150 #ifdef PUSH_ROUNDING
3152 /* Emit single push insn. */
/* Push X (of mode MODE) onto the stack.  TYPE, if nonzero, is the
   tree type used for setting memory attributes on the destination.  */
3155 emit_single_push_insn (mode, x, type)
3157 enum machine_mode mode;
3161 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3163 enum insn_code icode;
3164 insn_operand_predicate_fn pred;
/* Track the net stack adjustment for the benefit of later passes.  */
3166 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3167 /* If there is a push pattern, use it. Otherwise try the old way of
3168 throwing a MEM representing the push operation to the move expander. */
3169 icode = push_optab->handlers[(int) mode].insn_code;
3170 if (icode != CODE_FOR_nothing)
3172 if (((pred = insn_data[(int) icode].operand[0].predicate)
3173 && !((*pred) (x, mode))))
3174 x = force_reg (mode, x);
3175 emit_insn (GEN_FCN (icode) (x));
/* No push pattern: build the auto-modify destination address.  When
   the mode's size equals its push rounding, a simple PRE_DEC/PRE_INC
   suffices; otherwise use PRE_MODIFY with an explicit offset.  */
3178 if (GET_MODE_SIZE (mode) == rounded_size)
3179 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3182 #ifdef STACK_GROWS_DOWNWARD
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3186 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3187 GEN_INT (rounded_size));
3189 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3192 dest = gen_rtx_MEM (mode, dest_addr);
3196 set_mem_attributes (dest, type, 1);
3197 /* Function incoming arguments may overlap with sibling call
3198 outgoing arguments and we cannot allow reordering of reads
3199 from function arguments with stores to outgoing arguments
3200 of sibling calls. */
3201 set_mem_alias_set (dest, 0);
3203 emit_move_insn (dest, x);
3207 /* Generate code to push X onto the stack, assuming it has mode MODE and
3209 MODE is redundant except when X is a CONST_INT (since they don't
3211 SIZE is an rtx for the size of data to be copied (in bytes),
3212 needed only if X is BLKmode.
3214 ALIGN (in bits) is maximum alignment we can assume.
3216 If PARTIAL and REG are both nonzero, then copy that many of the first
3217 words of X into registers starting with REG, and push the rest of X.
3218 The amount of space pushed is decreased by PARTIAL words,
3219 rounded *down* to a multiple of PARM_BOUNDARY.
3220 REG must be a hard register in this case.
3221 If REG is zero but PARTIAL is not, take all other actions for an
3222 argument partially in registers, but do not actually load any
3225 EXTRA is the amount in bytes of extra space to leave next to this arg.
3226 This is ignored if an argument block has already been allocated.
3228 On a machine that lacks real push insns, ARGS_ADDR is the address of
3229 the bottom of the argument block for this call. We use indexing off there
3230 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3231 argument block has not been preallocated.
3233 ARGS_SO_FAR is the size of args previously pushed for this call.
3235 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3236 for arguments passed in registers. If nonzero, it will be the number
3237 of bytes required. */
3240 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3241 args_addr, args_so_far, reg_parm_stack_space,
3244 enum machine_mode mode;
3253 int reg_parm_stack_space;
3257 enum direction stack_direction
3258 #ifdef STACK_GROWS_DOWNWARD
3264 /* Decide where to pad the argument: `downward' for below,
3265 `upward' for above, or `none' for don't pad it.
3266 Default is below for small data on big-endian machines; else above. */
3267 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3269 /* Invert direction if stack is post-decrement.
3271 if (STACK_PUSH_CODE == POST_DEC)
3272 if (where_pad != none)
3273 where_pad = (where_pad == downward ? upward : downward);
3275 xinner = x = protect_from_queue (x, 0);
/* Three top-level cases follow: BLKmode block argument, scalar
   partly passed in registers, and plain scalar push.  */
3277 if (mode == BLKmode)
3279 /* Copy a block into the stack, entirely or partially. */
3282 int used = partial * UNITS_PER_WORD;
3283 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3291 /* USED is now the # of bytes we need not copy to the stack
3292 because registers will take care of them. */
3295 xinner = adjust_address (xinner, BLKmode, used);
3297 /* If the partial register-part of the arg counts in its stack size,
3298 skip the part of stack space corresponding to the registers.
3299 Otherwise, start copying to the beginning of the stack space,
3300 by setting SKIP to 0. */
3301 skip = (reg_parm_stack_space == 0) ? 0 : used;
3303 #ifdef PUSH_ROUNDING
3304 /* Do it with several push insns if that doesn't take lots of insns
3305 and if there is no difficulty with push insns that skip bytes
3306 on the stack for alignment purposes. */
3309 && GET_CODE (size) == CONST_INT
3311 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3312 /* Here we avoid the case of a structure whose weak alignment
3313 forces many pushes of a small amount of data,
3314 and such small pushes do rounding that causes trouble. */
3315 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3316 || align >= BIGGEST_ALIGNMENT
3317 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3318 == (align / BITS_PER_UNIT)))
3319 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3321 /* Push padding now if padding above and stack grows down,
3322 or if padding below and stack grows up.
3323 But if space already allocated, this has already been done. */
3324 if (extra && args_addr == 0
3325 && where_pad != none && where_pad != stack_direction)
3326 anti_adjust_stack (GEN_INT (extra));
3328 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
/* -fcheck-memory-usage instrumentation: tell the checker library
   about the bytes just pushed.  */
3330 if (current_function_check_memory_usage && ! in_check_memory_usage)
3334 in_check_memory_usage = 1;
3335 temp = get_push_address (INTVAL (size) - used);
3336 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3337 emit_library_call (chkr_copy_bitmap_libfunc,
3338 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3339 Pmode, XEXP (xinner, 0), Pmode,
3340 GEN_INT (INTVAL (size) - used),
3341 TYPE_MODE (sizetype));
3343 emit_library_call (chkr_set_right_libfunc,
3344 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3345 Pmode, GEN_INT (INTVAL (size) - used),
3346 TYPE_MODE (sizetype),
3347 GEN_INT (MEMORY_USE_RW),
3348 TYPE_MODE (integer_type_node));
3349 in_check_memory_usage = 0;
3353 #endif /* PUSH_ROUNDING */
3357 /* Otherwise make space on the stack and copy the data
3358 to the address of that space. */
3360 /* Deduct words put into registers from the size we must copy. */
3363 if (GET_CODE (size) == CONST_INT)
3364 size = GEN_INT (INTVAL (size) - used)
3366 size = expand_binop (GET_MODE (size), sub_optab, size,
3367 GEN_INT (used), NULL_RTX, 0,
3371 /* Get the address of the stack space.
3372 In this case, we do not deal with EXTRA separately.
3373 A single stack adjust will do. */
3376 temp = push_block (size, extra, where_pad == downward);
3379 else if (GET_CODE (args_so_far) == CONST_INT)
3380 temp = memory_address (BLKmode,
3381 plus_constant (args_addr,
3382 skip + INTVAL (args_so_far)));
3384 temp = memory_address (BLKmode,
3385 plus_constant (gen_rtx_PLUS (Pmode,
3389 if (current_function_check_memory_usage && ! in_check_memory_usage)
3391 in_check_memory_usage = 1;
3392 target = copy_to_reg (temp);
3393 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3394 emit_library_call (chkr_copy_bitmap_libfunc,
3395 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3397 XEXP (xinner, 0), Pmode,
3398 size, TYPE_MODE (sizetype));
3400 emit_library_call (chkr_set_right_libfunc,
3401 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3403 size, TYPE_MODE (sizetype),
3404 GEN_INT (MEMORY_USE_RW),
3405 TYPE_MODE (integer_type_node));
3406 in_check_memory_usage = 0;
3409 target = gen_rtx_MEM (BLKmode, temp);
3413 set_mem_attributes (target, type, 1);
3414 /* Function incoming arguments may overlap with sibling call
3415 outgoing arguments and we cannot allow reordering of reads
3416 from function arguments with stores to outgoing arguments
3417 of sibling calls. */
3418 set_mem_alias_set (target, 0);
3421 /* TEMP is the address of the block. Copy the data there. */
3422 if (GET_CODE (size) == CONST_INT
3423 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3425 move_by_pieces (target, xinner, INTVAL (size), align);
/* Otherwise look for a movstr pattern, narrowest acceptable
   integer mode first (same approach as clear_storage above).  */
3430 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3431 enum machine_mode mode;
3433 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3435 mode = GET_MODE_WIDER_MODE (mode))
3437 enum insn_code code = movstr_optab[(int) mode];
3438 insn_operand_predicate_fn pred;
3440 if (code != CODE_FOR_nothing
3441 && ((GET_CODE (size) == CONST_INT
3442 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3443 <= (GET_MODE_MASK (mode) >> 1)))
3444 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3445 && (!(pred = insn_data[(int) code].operand[0].predicate)
3446 || ((*pred) (target, BLKmode)))
3447 && (!(pred = insn_data[(int) code].operand[1].predicate)
3448 || ((*pred) (xinner, BLKmode)))
3449 && (!(pred = insn_data[(int) code].operand[3].predicate)
3450 || ((*pred) (opalign, VOIDmode))))
3452 rtx op2 = convert_to_mode (mode, size, 1);
3453 rtx last = get_last_insn ();
3456 pred = insn_data[(int) code].operand[2].predicate;
3457 if (pred != 0 && ! (*pred) (op2, mode))
3458 op2 = copy_to_mode_reg (mode, op2);
3460 pat = GEN_FCN ((int) code) (target, xinner,
/* Pattern expansion failed; undo and try a wider mode.  */
3468 delete_insns_since (last);
/* Last resort: an out-of-line memcpy/bcopy call.  */
3473 if (!ACCUMULATE_OUTGOING_ARGS)
3475 /* If the source is referenced relative to the stack pointer,
3476 copy it to another register to stabilize it. We do not need
3477 to do this if we know that we won't be changing sp. */
3479 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3480 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3481 temp = copy_to_reg (temp);
3484 /* Make inhibit_defer_pop nonzero around the library call
3485 to force it to pop the bcopy-arguments right away. */
3487 #ifdef TARGET_MEM_FUNCTIONS
3488 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3489 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3490 convert_to_mode (TYPE_MODE (sizetype),
3491 size, TREE_UNSIGNED (sizetype)),
3492 TYPE_MODE (sizetype));
3494 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3495 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3496 convert_to_mode (TYPE_MODE (integer_type_node),
3498 TREE_UNSIGNED (integer_type_node)),
3499 TYPE_MODE (integer_type_node));
3504 else if (partial > 0)
3506 /* Scalar partly in registers. */
3508 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3511 /* # words of start of argument
3512 that we must make space for but need not store. */
3513 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3514 int args_offset = INTVAL (args_so_far);
3517 /* Push padding now if padding above and stack grows down,
3518 or if padding below and stack grows up.
3519 But if space already allocated, this has already been done. */
3520 if (extra && args_addr == 0
3521 && where_pad != none && where_pad != stack_direction)
3522 anti_adjust_stack (GEN_INT (extra));
3524 /* If we make space by pushing it, we might as well push
3525 the real data. Otherwise, we can leave OFFSET nonzero
3526 and leave the space uninitialized. */
3530 /* Now NOT_STACK gets the number of words that we don't need to
3531 allocate on the stack. */
3532 not_stack = partial - offset;
3534 /* If the partial register-part of the arg counts in its stack size,
3535 skip the part of stack space corresponding to the registers.
3536 Otherwise, start copying to the beginning of the stack space,
3537 by setting SKIP to 0. */
3538 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3540 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3541 x = validize_mem (force_const_mem (mode, x));
3543 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3544 SUBREGs of such registers are not allowed. */
3545 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3546 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3547 x = copy_to_reg (x);
3549 /* Loop over all the words allocated on the stack for this arg. */
3550 /* We can do it by words, because any scalar bigger than a word
3551 has a size a multiple of a word. */
3552 #ifndef PUSH_ARGS_REVERSED
3553 for (i = not_stack; i < size; i++)
3555 for (i = size - 1; i >= not_stack; i--)
3557 if (i >= not_stack + offset)
/* Recursive word-sized push of each remaining subword.  */
3558 emit_push_insn (operand_subword_force (x, i, mode),
3559 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3561 GEN_INT (args_offset + ((i - not_stack + skip)
3563 reg_parm_stack_space, alignment_pad);
/* Plain scalar push.  */
3568 rtx target = NULL_RTX;
3571 /* Push padding now if padding above and stack grows down,
3572 or if padding below and stack grows up.
3573 But if space already allocated, this has already been done. */
3574 if (extra && args_addr == 0
3575 && where_pad != none && where_pad != stack_direction)
3576 anti_adjust_stack (GEN_INT (extra));
3578 #ifdef PUSH_ROUNDING
3579 if (args_addr == 0 && PUSH_ARGS)
3580 emit_single_push_insn (mode, x, type);
/* No push insn available: store into the preallocated block.  */
3584 if (GET_CODE (args_so_far) == CONST_INT)
3586 = memory_address (mode,
3587 plus_constant (args_addr,
3588 INTVAL (args_so_far)));
3590 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3593 dest = gen_rtx_MEM (mode, addr);
3596 set_mem_attributes (dest, type, 1);
3597 /* Function incoming arguments may overlap with sibling call
3598 outgoing arguments and we cannot allow reordering of reads
3599 from function arguments with stores to outgoing arguments
3600 of sibling calls. */
3601 set_mem_alias_set (dest, 0);
3604 emit_move_insn (dest, x);
3608 if (current_function_check_memory_usage && ! in_check_memory_usage)
3610 in_check_memory_usage = 1;
3612 target = get_push_address (GET_MODE_SIZE (mode));
3614 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3615 emit_library_call (chkr_copy_bitmap_libfunc,
3616 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3617 Pmode, XEXP (x, 0), Pmode,
3618 GEN_INT (GET_MODE_SIZE (mode)),
3619 TYPE_MODE (sizetype));
3621 emit_library_call (chkr_set_right_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3624 TYPE_MODE (sizetype),
3625 GEN_INT (MEMORY_USE_RW),
3626 TYPE_MODE (integer_type_node));
3627 in_check_memory_usage = 0;
3632 /* If part should go in registers, copy that part
3633 into the appropriate registers. Do this now, at the end,
3634 since mem-to-mem copies above may do function calls. */
3635 if (partial > 0 && reg != 0)
3637 /* Handle calls that pass values in multiple non-contiguous locations.
3638 The Irix 6 ABI has examples of this. */
3639 if (GET_CODE (reg) == PARALLEL)
3640 emit_group_load (reg, x, -1, align); /* ??? size? */
3642 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Trailing padding, if it belongs on this side of the argument.  */
3645 if (extra && args_addr == 0 && where_pad == stack_direction)
3646 anti_adjust_stack (GEN_INT (extra));
3648 if (alignment_pad && args_addr == 0)
3649 anti_adjust_stack (alignment_pad);
3652 /* Return X if X can be used as a subtarget in a sequence of arithmetic
/* NOTE(review): the function header and the opening of the condition
   are lost in an elision gap here -- this appears to be the disjunct
   list deciding when X is NOT usable as a subtarget (presumably
   returning 0 in that case); confirm against the full file.  */
3660 /* Only registers can be subtargets. */
3661 || GET_CODE (x) != REG
3662 /* If the register is readonly, it can't be set more than once. */
3663 || RTX_UNCHANGING_P (x)
3664 /* Don't use hard regs to avoid extending their life. */
3665 || REGNO (x) < FIRST_PSEUDO_REGISTER
3666 /* Avoid subtargets inside loops,
3667 since they hide some invariant expressions. */
3668 || preserve_subexpressions_p ())
3672 /* Expand an assignment that stores the value of FROM into TO.
3673 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3674 (This may contain a QUEUED rtx;
3675 if the value is constant, this rtx is a constant.)
3676 Otherwise, the returned value is NULL_RTX.
3678 SUGGEST_REG is no longer actually used.
3679 It used to mean, copy the value through a register
3680 and return that register, if that is possible.
3681 We now use WANT_VALUE to decide whether to do this. */
3684 expand_assignment (to, from, want_value, suggest_reg)
3687 int suggest_reg ATTRIBUTE_UNUSED;
3689 register rtx to_rtx = 0;
3692 /* Don't crash if the lhs of the assignment was erroneous. */
3694 if (TREE_CODE (to) == ERROR_MARK)
3696 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3697 return want_value ? result : NULL_RTX;
3700 /* Assignment of a structure component needs special treatment
3701 if the structure component's rtx is not simply a MEM.
3702 Assignment of an array element at a constant index, and assignment of
3703 an array element in an unaligned packed structure field, has the same
3706 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3707 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3709 enum machine_mode mode1;
3710 HOST_WIDE_INT bitsize, bitpos;
3715 unsigned int alignment;
/* Decompose the reference into a base object TEM plus a bit range
   [bitpos, bitpos+bitsize) and an optional variable OFFSET.  */
3718 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3719 &unsignedp, &volatilep, &alignment);
3721 /* If we are going to use store_bit_field and extract_bit_field,
3722 make sure to_rtx will be safe for multiple use. */
3724 if (mode1 == VOIDmode && want_value)
3725 tem = stabilize_reference (tem);
3727 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT)
/* Expand any variable offset and fold it into the address in ptr_mode.  */;
3730 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3732 if (GET_CODE (to_rtx) != MEM)
3735 if (GET_MODE (offset_rtx) != ptr_mode)
3737 #ifdef POINTERS_EXTEND_UNSIGNED
3738 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3740 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3744 /* A constant address in TO_RTX can have VOIDmode, we must not try
3745 to call force_reg for that case. Avoid that case. */
3746 if (GET_CODE (to_rtx) == MEM
3747 && GET_MODE (to_rtx) == BLKmode
3748 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3750 && (bitpos % bitsize) == 0
3751 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3752 && alignment == GET_MODE_ALIGNMENT (mode1))
3755 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3757 if (GET_CODE (XEXP (temp, 0)) == REG)
3760 to_rtx = (replace_equiv_address
3761 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3766 to_rtx = change_address (to_rtx, VOIDmode,
3767 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3768 force_reg (ptr_mode,
3774 if (GET_CODE (to_rtx) == MEM)
3776 /* When the offset is zero, to_rtx is the address of the
3777 structure we are storing into, and hence may be shared.
3778 We must make a new MEM before setting the volatile bit. */
3780 to_rtx = copy_rtx (to_rtx);
3782 MEM_VOLATILE_P (to_rtx) = 1;
3784 #if 0 /* This was turned off because, when a field is volatile
3785 in an object which is not volatile, the object may be in a register,
3786 and then we would abort over here. */
3792 if (TREE_CODE (to) == COMPONENT_REF
3793 && TREE_READONLY (TREE_OPERAND (to, 1)))
3796 to_rtx = copy_rtx (to_rtx);
3798 RTX_UNCHANGING_P (to_rtx) = 1;
3801 /* Check the access. */
3802 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3807 enum machine_mode best_mode;
3809 best_mode = get_best_mode (bitsize, bitpos,
3810 TYPE_ALIGN (TREE_TYPE (tem)),
3812 if (best_mode == VOIDmode)
3815 best_mode_size = GET_MODE_BITSIZE (best_mode);
3816 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3817 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3818 size *= GET_MODE_SIZE (best_mode);
3820 /* Check the access right of the pointer. */
3821 in_check_memory_usage = 1;
3823 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3824 VOIDmode, 3, to_addr, Pmode,
3825 GEN_INT (size), TYPE_MODE (sizetype),
3826 GEN_INT (MEMORY_USE_WO),
3827 TYPE_MODE (integer_type_node));
3828 in_check_memory_usage = 0;
3831 /* If this is a varying-length object, we must get the address of
3832 the source and do an explicit block move. */
3835 unsigned int from_align;
3836 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3838 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3840 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3841 MIN (alignment, from_align))
/* Otherwise, store FROM into the selected bit range of TO_RTX; store_field
   handles bit-field extraction/insertion as needed.  */;
3848 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3850 /* Spurious cast for HPUX compiler. */
3851 ? ((enum machine_mode)
3852 TYPE_MODE (TREE_TYPE (to)))
3856 int_size_in_bytes (TREE_TYPE (tem)),
3857 get_alias_set (to));
3859 preserve_temp_slots (result);
3863 /* If the value is meaningful, convert RESULT to the proper mode.
3864 Otherwise, return nothing. */
3865 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3866 TYPE_MODE (TREE_TYPE (from)),
3868 TREE_UNSIGNED (TREE_TYPE (to)))
3873 /* If the rhs is a function call and its value is not an aggregate,
3874 call the function before we start to compute the lhs.
3875 This is needed for correct code for cases such as
3876 val = setjmp (buf) on machines where reference to val
3877 requires loading up part of an address in a separate insn.
3879 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3880 since it might be a promoted variable where the zero- or sign- extension
3881 needs to be done. Handling this in the normal way is safe because no
3882 computation is done before the call. */
3883 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3884 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3885 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3886 && GET_CODE (DECL_RTL (to)) == REG))
3891 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3893 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3895 /* Handle calls that return values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 if (GET_CODE (to_rtx) == PARALLEL)
3898 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3899 TYPE_ALIGN (TREE_TYPE (from)));
3900 else if (GET_MODE (to_rtx) == BLKmode)
3901 emit_block_move (to_rtx, value, expr_size (from),
3902 TYPE_ALIGN (TREE_TYPE (from)));
/* Pointer-valued results may need conversion to the address mode of
   TO_RTX on targets that extend pointers unsigned.  */
3905 #ifdef POINTERS_EXTEND_UNSIGNED
3906 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3907 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3908 value = convert_memory_address (GET_MODE (to_rtx), value);
3910 emit_move_insn (to_rtx, value);
3912 preserve_temp_slots (to_rtx);
3915 return want_value ? to_rtx : NULL_RTX;
3918 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3919 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3923 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3924 if (GET_CODE (to_rtx) == MEM)
3925 set_mem_alias_set (to_rtx, get_alias_set (to));
3928 /* Don't move directly into a return register. */
3929 if (TREE_CODE (to) == RESULT_DECL
3930 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3935 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3937 if (GET_CODE (to_rtx) == PARALLEL)
3938 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3939 TYPE_ALIGN (TREE_TYPE (from)));
3941 emit_move_insn (to_rtx, temp);
3943 preserve_temp_slots (to_rtx);
3946 return want_value ? to_rtx : NULL_RTX;
3949 /* In case we are returning the contents of an object which overlaps
3950 the place the value is being stored, use a safe function when copying
3951 a value through a pointer into a structure value return block. */
3952 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3953 && current_function_returns_struct
3954 && !current_function_returns_pcc_struct)
3959 size = expr_size (from);
3960 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3961 EXPAND_MEMORY_USE_DONT);
3963 /* Copy the rights of the bitmap. */
3964 if (current_function_check_memory_usage)
3965 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3966 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3967 XEXP (from_rtx, 0), Pmode,
3968 convert_to_mode (TYPE_MODE (sizetype),
3969 size, TREE_UNSIGNED (sizetype)),
3970 TYPE_MODE (sizetype))
/* Use memmove (or bcopy on non-mem-function targets) because source and
   destination may overlap; note bcopy takes (src, dst) argument order.  */;
3972 #ifdef TARGET_MEM_FUNCTIONS
3973 emit_library_call (memmove_libfunc, LCT_NORMAL,
3974 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3975 XEXP (from_rtx, 0), Pmode,
3976 convert_to_mode (TYPE_MODE (sizetype),
3977 size, TREE_UNSIGNED (sizetype)),
3978 TYPE_MODE (sizetype));
3980 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3981 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3982 XEXP (to_rtx, 0), Pmode,
3983 convert_to_mode (TYPE_MODE (integer_type_node),
3984 size, TREE_UNSIGNED (integer_type_node)),
3985 TYPE_MODE (integer_type_node));
3988 preserve_temp_slots (to_rtx);
3991 return want_value ? to_rtx : NULL_RTX;
3994 /* Compute FROM and store the value in the rtx we got. */
3997 result = store_expr (from, to_rtx, want_value);
3998 preserve_temp_slots (result);
4001 return want_value ? result : NULL_RTX;
4004 /* Generate code for computing expression EXP,
4005 and storing the value into TARGET.
4006 TARGET may contain a QUEUED rtx.
4008 If WANT_VALUE is nonzero, return a copy of the value
4009 not in TARGET, so that we can be sure to use the proper
4010 value in a containing expression even if TARGET has something
4011 else stored in it. If possible, we copy the value through a pseudo
4012 and return that pseudo. Or, if the value is constant, we try to
4013 return the constant. In some cases, we return a pseudo
4014 copied *from* TARGET.
4016 If the mode is BLKmode then we may return TARGET itself.
4017 It turns out that in BLKmode it doesn't cause a problem.
4018 because C has no operators that could combine two different
4019 assignments into the same BLKmode object with different values
4020 with no sequence point. Will other languages need this to
4023 If WANT_VALUE is 0, we return NULL, to make sure
4024 to catch quickly any cases where the caller uses the value
4025 and fails to set WANT_VALUE. */
4028 store_expr (exp, target, want_value)
4030 register rtx target;
4034 int dont_return_target = 0;
4035 int dont_store_target = 0;
4037 if (TREE_CODE (exp) == COMPOUND_EXPR)
4039 /* Perform first part of compound expression, then assign from second
4041 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4043 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4045 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4047 /* For conditional expression, get safe form of the target. Then
4048 test the condition, doing the appropriate assignment on either
4049 side. This avoids the creation of unnecessary temporaries.
4050 For non-BLKmode, it is more efficient not to do this. */
4052 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4055 target = protect_from_queue (target, 1);
4057 do_pending_stack_adjust ();
4059 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4060 start_cleanup_deferral ();
4061 store_expr (TREE_OPERAND (exp, 1), target, 0);
4062 end_cleanup_deferral ();
4064 emit_jump_insn (gen_jump (lab2));
4067 start_cleanup_deferral ();
4068 store_expr (TREE_OPERAND (exp, 2), target, 0);
4069 end_cleanup_deferral ();
4074 return want_value ? target : NULL_RTX;
4076 else if (queued_subexp_p (target))
4077 /* If target contains a postincrement, let's not risk
4078 using it as the place to generate the rhs. */
4080 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4082 /* Expand EXP into a new pseudo. */
4083 temp = gen_reg_rtx (GET_MODE (target));
4084 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4087 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4089 /* If target is volatile, ANSI requires accessing the value
4090 *from* the target, if it is accessed. So make that happen.
4091 In no case return the target itself. */
4092 if (! MEM_VOLATILE_P (target) && want_value)
4093 dont_return_target = 1;
4095 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4096 && GET_MODE (target) != BLKmode)
4097 /* If target is in memory and caller wants value in a register instead,
4098 arrange that. Pass TARGET as target for expand_expr so that,
4099 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4100 We know expand_expr will not use the target in that case.
4101 Don't do this if TARGET is volatile because we are supposed
4102 to write it and then read it. */
4104 temp = expand_expr (exp, target, GET_MODE (target), 0);
4105 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4107 /* If TEMP is already in the desired TARGET, only copy it from
4108 memory and don't store it there again. */
4110 || (rtx_equal_p (temp, target)
4111 && ! side_effects_p (temp) && ! side_effects_p (target)))
4112 dont_store_target = 1;
4113 temp = copy_to_reg (temp);
4115 dont_return_target = 1;
4117 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4118 /* If this is an scalar in a register that is stored in a wider mode
4119 than the declared mode, compute the result into its declared mode
4120 and then convert to the wider mode. Our value is the computed
4123 /* If we don't want a value, we can do the conversion inside EXP,
4124 which will often result in some optimizations. Do the conversion
4125 in two steps: first change the signedness, if needed, then
4126 the extend. But don't do this if the type of EXP is a subtype
4127 of something else since then the conversion might involve
4128 more than just converting modes. */
4129 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4130 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4132 if (TREE_UNSIGNED (TREE_TYPE (exp))
4133 != SUBREG_PROMOTED_UNSIGNED_P (target))
4136 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4140 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4141 SUBREG_PROMOTED_UNSIGNED_P (target)),
4145 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4147 /* If TEMP is a volatile MEM and we want a result value, make
4148 the access now so it gets done only once. Likewise if
4149 it contains TARGET. */
4150 if (GET_CODE (temp) == MEM && want_value
4151 && (MEM_VOLATILE_P (temp)
4152 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4153 temp = copy_to_reg (temp);
4155 /* If TEMP is a VOIDmode constant, use convert_modes to make
4156 sure that we properly convert it. */
4157 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 TYPE_MODE (TREE_TYPE (exp)), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 convert_move (SUBREG_REG (target), temp,
4163 SUBREG_PROMOTED_UNSIGNED_P (target));
4165 /* If we promoted a constant, change the mode back down to match
4166 target. Otherwise, the caller might get confused by a result whose
4167 mode is larger than expected. */
4169 if (want_value && GET_MODE (temp) != GET_MODE (target)
4170 && GET_MODE (temp) != VOIDmode)
4172 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4173 SUBREG_PROMOTED_VAR_P (temp) = 1;
4174 SUBREG_PROMOTED_UNSIGNED_P (temp)
4175 = SUBREG_PROMOTED_UNSIGNED_P (target);
4178 return want_value ? temp : NULL_RTX;
/* Ordinary case: expand EXP, suggesting TARGET as the destination.  */
4182 temp = expand_expr (exp, target, GET_MODE (target), 0);
4183 /* Return TARGET if it's a specified hardware register.
4184 If TARGET is a volatile mem ref, either return TARGET
4185 or return a reg copied *from* TARGET; ANSI requires this.
4187 Otherwise, if TEMP is not TARGET, return TEMP
4188 if it is constant (for efficiency),
4189 or if we really want the correct value. */
4190 if (!(target && GET_CODE (target) == REG
4191 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4192 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4193 && ! rtx_equal_p (temp, target)
4194 && (CONSTANT_P (temp) || want_value))
4195 dont_return_target = 1;
4198 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4199 the same as that of TARGET, adjust the constant. This is needed, for
4200 example, in case it is a CONST_DOUBLE and we want only a word-sized
4202 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4203 && TREE_CODE (exp) != ERROR_MARK
4204 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4205 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4206 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4208 if (current_function_check_memory_usage
4209 && GET_CODE (target) == MEM
4210 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4212 in_check_memory_usage = 1;
4213 if (GET_CODE (temp) == MEM)
4214 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4215 VOIDmode, 3, XEXP (target, 0), Pmode,
4216 XEXP (temp, 0), Pmode,
4217 expr_size (exp), TYPE_MODE (sizetype));
4219 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4220 VOIDmode, 3, XEXP (target, 0), Pmode,
4221 expr_size (exp), TYPE_MODE (sizetype),
4222 GEN_INT (MEMORY_USE_WO),
4223 TYPE_MODE (integer_type_node));
4224 in_check_memory_usage = 0;
4227 /* If value was not generated in the target, store it there.
4228 Convert the value to TARGET's type first if nec. */
4229 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4230 one or both of them are volatile memory refs, we have to distinguish
4232 - expand_expr has used TARGET. In this case, we must not generate
4233 another copy. This can be detected by TARGET being equal according
4235 - expand_expr has not used TARGET - that means that the source just
4236 happens to have the same RTX form. Since temp will have been created
4237 by expand_expr, it will compare unequal according to == .
4238 We must generate a copy in this case, to reach the correct number
4239 of volatile memory references. */
4241 if ((! rtx_equal_p (temp, target)
4242 || (temp != target && (side_effects_p (temp)
4243 || side_effects_p (target))))
4244 && TREE_CODE (exp) != ERROR_MARK
4245 && ! dont_store_target)
4247 target = protect_from_queue (target, 1);
4248 if (GET_MODE (temp) != GET_MODE (target)
4249 && GET_MODE (temp) != VOIDmode)
4251 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4252 if (dont_return_target)
4254 /* In this case, we will return TEMP,
4255 so make sure it has the proper mode.
4256 But don't forget to store the value into TARGET. */
4257 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4258 emit_move_insn (target, temp);
4261 convert_move (target, temp, unsignedp);
4264 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4266 /* Handle copying a string constant into an array.
4267 The string constant may be shorter than the array.
4268 So copy just the string's actual length, and clear the rest. */
4272 /* Get the size of the data type of the string,
4273 which is actually the size of the target. */
4274 size = expr_size (exp);
4275 if (GET_CODE (size) == CONST_INT
4276 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4277 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4280 /* Compute the size of the data to copy from the string. */
4282 = size_binop (MIN_EXPR,
4283 make_tree (sizetype, size),
4284 size_int (TREE_STRING_LENGTH (exp)));
4285 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4286 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4290 /* Copy that much. */
4291 emit_block_move (target, temp, copy_size_rtx,
4292 TYPE_ALIGN (TREE_TYPE (exp)));
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
4297 addr = XEXP (target, 0);
4298 addr = convert_modes (ptr_mode, Pmode, addr, 1);
/* Known copy size: step past the copied bytes and shrink the residue
   at compile time.  */
4300 if (GET_CODE (copy_size_rtx) == CONST_INT)
4302 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4303 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4305 (unsigned int) (BITS_PER_UNIT
4306 * (INTVAL (copy_size_rtx)
4307 & - INTVAL (copy_size_rtx))))
/* Variable copy size: compute the residual address and byte count at
   run time, and branch around the clearing when nothing is left.  */;
4311 addr = force_reg (ptr_mode, addr);
4312 addr = expand_binop (ptr_mode, add_optab, addr,
4313 copy_size_rtx, NULL_RTX, 0,
4316 size = expand_binop (ptr_mode, sub_optab, size,
4317 copy_size_rtx, NULL_RTX, 0,
4320 align = BITS_PER_UNIT;
4321 label = gen_label_rtx ();
4322 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4323 GET_MODE (size), 0, 0, label);
4325 align = MIN (align, expr_align (copy_size));
4327 if (size != const0_rtx)
4329 rtx dest = gen_rtx_MEM (BLKmode, addr);
4331 MEM_COPY_ATTRIBUTES (dest, target);
4333 /* Be sure we can write on ADDR. */
4334 in_check_memory_usage = 1;
4335 if (current_function_check_memory_usage)
4336 emit_library_call (chkr_check_addr_libfunc,
4337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4339 size, TYPE_MODE (sizetype),
4340 GEN_INT (MEMORY_USE_WO),
4341 TYPE_MODE (integer_type_node));
4342 in_check_memory_usage = 0;
4343 clear_storage (dest, size, align);
4350 /* Handle calls that return values in multiple non-contiguous locations.
4351 The Irix 6 ABI has examples of this. */
4352 else if (GET_CODE (target) == PARALLEL)
4353 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4355 else if (GET_MODE (temp) == BLKmode)
4356 emit_block_move (target, temp, expr_size (exp),
4357 TYPE_ALIGN (TREE_TYPE (exp)));
4359 emit_move_insn (target, temp);
4362 /* If we don't want a value, return NULL_RTX. */
4366 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4367 ??? The latter test doesn't seem to make sense. */
4368 else if (dont_return_target && GET_CODE (temp) != MEM)
4371 /* Return TARGET itself if it is a hard register. */
4372 else if (want_value && GET_MODE (target) != BLKmode
4373 && ! (GET_CODE (target) == REG
4374 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4375 return copy_to_reg (target);
4381 /* Return 1 if EXP just contains zeros. */
4389 switch (TREE_CODE (exp))
4393 case NON_LVALUE_EXPR:
4394 return is_zeros_p (TREE_OPERAND (exp, 0));
4397 return integer_zerop (exp);
/* A complex constant is all zeros iff both its parts are.  */
4401 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
/* A real constant counts as zero only if bit-identical to +0.0.  */
4404 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
/* A set-type CONSTRUCTOR is zero iff it lists no elements; otherwise
   every listed element value must itself be all zeros.  */
4407 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4408 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4409 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4410 if (! is_zeros_p (TREE_VALUE (elt)))
4420 /* Return 1 if EXP contains mostly (3/4) zeros. */
4423 mostly_zeros_p (exp)
4426 if (TREE_CODE (exp) == CONSTRUCTOR)
4428 int elts = 0, zeros = 0;
4429 tree elt = CONSTRUCTOR_ELTS (exp);
4430 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4432 /* If there are no ranges of true bits, it is all zero. */
4433 return elt == NULL_TREE;
4435 for (; elt; elt = TREE_CHAIN (elt))
4437 /* We do not handle the case where the index is a RANGE_EXPR,
4438 so the statistic will be somewhat inaccurate.
4439 We do make a more accurate count in store_constructor itself,
4440 so since this function is only used for nested array elements,
4441 this should be close enough. */
4442 if (mostly_zeros_p (TREE_VALUE (elt)))
/* True when at least 75% of the counted elements are (mostly) zero.  */
4447 return 4 * zeros >= 3 * elts;
/* Not a CONSTRUCTOR: fall back to the exact all-zeros test.  */
4450 return is_zeros_p (exp);
4453 /* Helper function for store_constructor.
4454 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4455 TYPE is the type of the CONSTRUCTOR, not the element type.
4456 ALIGN and CLEARED are as for store_constructor.
4457 ALIAS_SET is the alias set to use for any stores.
4459 This provides a recursive shortcut back to store_constructor when it isn't
4460 necessary to go through store_field. This is so that we can pass through
4461 the cleared field to let store_constructor know that we may not have to
4462 clear a substructure if the outer structure has already been cleared. */
4465 store_constructor_field (target, bitsize, bitpos,
4466 mode, exp, type, align, cleared, alias_set)
4468 unsigned HOST_WIDE_INT bitsize;
4469 HOST_WIDE_INT bitpos;
4470 enum machine_mode mode;
/* Nested CONSTRUCTOR at a byte boundary: recurse directly into
   store_constructor on a MEM adjusted to the sub-object.  */
4476 if (TREE_CODE (exp) == CONSTRUCTOR
4477 && bitpos % BITS_PER_UNIT == 0
4478 /* If we have a non-zero bitpos for a register target, then we just
4479 let store_field do the bitfield handling. This is unlikely to
4480 generate unnecessary clear instructions anyways. */
4481 && (bitpos == 0 || GET_CODE (target) == MEM)
/* Keep BLKmode when the sub-object is not aligned to the target's mode;
   otherwise let adjust_address pick the natural mode.  */)
4485 = adjust_address (target,
4486 GET_MODE (target) == BLKmode
4488 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4489 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4492 /* Show the alignment may no longer be what it was and update the alias
4493 set, if required. */
4495 align = MIN (align, (unsigned int) bitpos & - bitpos);
4496 if (GET_CODE (target) == MEM)
4497 set_mem_alias_set (target, alias_set);
4499 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT)
/* Otherwise fall back to the general bit-field store path.  */;
4502 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4503 int_size_in_bytes (type), alias_set);
4506 /* Store the value of constructor EXP into the rtx TARGET.
4507 TARGET is either a REG or a MEM.
4508 ALIGN is the maximum known alignment for TARGET.
4509 CLEARED is true if TARGET is known to have been zero'd.
4510 SIZE is the number of bytes of TARGET we are allowed to modify: this
4511 may not be the same as the size of EXP if we are assigning to a field
4512 which has been packed to exclude padding bits. */
4515 store_constructor (exp, target, align, cleared, size)
4522 tree type = TREE_TYPE (exp);
4523 #ifdef WORD_REGISTER_OPERATIONS
4524 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4527 /* We know our target cannot conflict, since safe_from_p has been called. */
4529 /* Don't try copying piece by piece into a hard register
4530 since that is vulnerable to being clobbered by EXP.
4531 Instead, construct in a pseudo register and then copy it all. */
4532 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4534 rtx temp = gen_reg_rtx (GET_MODE (target));
4535 store_constructor (exp, temp, align, cleared, size);
4536 emit_move_insn (target, temp);
4541 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
4546 /* Inform later passes that the whole union value is dead. */
4547 if ((TREE_CODE (type) == UNION_TYPE
4548 || TREE_CODE (type) == QUAL_UNION_TYPE)
4551 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4553 /* If the constructor is empty, clear the union. */
4554 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4555 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4558 /* If we are building a static constructor into a register,
4559 set the initial value as zero so we can fold the value into
4560 a constant. But if more than one register is involved,
4561 this probably loses. */
4562 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4563 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4566 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4571 /* If the constructor has fewer fields than the structure
4572 or if we are initializing the structure to mostly zeros,
4573 clear the whole structure first. Don't do this if TARGET is a
4574 register whose mode size isn't equal to SIZE since clear_storage
4575 can't handle this case. */
4577 && ((list_length (CONSTRUCTOR_ELTS (exp))
4578 != fields_length (type))
4579 || mostly_zeros_p (exp))
4580 && (GET_CODE (target) != REG
4581 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4584 clear_storage (target, GEN_INT (size), align);
4589 /* Inform later passes that the old value is dead. */
4590 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4592 /* Store each element of the constructor into
4593 the corresponding field of TARGET. */
4595 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4597 register tree field = TREE_PURPOSE (elt);
4598 #ifdef WORD_REGISTER_OPERATIONS
4599 tree value = TREE_VALUE (elt);
4601 register enum machine_mode mode;
4602 HOST_WIDE_INT bitsize;
4603 HOST_WIDE_INT bitpos = 0;
4606 rtx to_rtx = target;
4608 /* Just ignore missing fields.
4609 We cleared the whole structure, above,
4610 if any fields are missing. */
4614 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4617 if (host_integerp (DECL_SIZE (field), 1))
4618 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4622 unsignedp = TREE_UNSIGNED (field);
4623 mode = DECL_MODE (field);
4624 if (DECL_BIT_FIELD (field))
4627 offset = DECL_FIELD_OFFSET (field);
4628 if (host_integerp (offset, 0)
4629 && host_integerp (bit_position (field), 0))
4631 bitpos = int_bit_position (field);
4635 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4641 if (contains_placeholder_p (offset))
4642 offset = build (WITH_RECORD_EXPR, sizetype,
4643 offset, make_tree (TREE_TYPE (exp), target));
4645 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4646 if (GET_CODE (to_rtx) != MEM)
4649 if (GET_MODE (offset_rtx) != ptr_mode)
4651 #ifdef POINTERS_EXTEND_UNSIGNED
4652 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4654 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4659 = change_address (to_rtx, VOIDmode,
4660 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4661 force_reg (ptr_mode,
4663 align = DECL_OFFSET_ALIGN (field);
4666 if (TREE_READONLY (field))
4668 if (GET_CODE (to_rtx) == MEM)
4669 to_rtx = copy_rtx (to_rtx);
4671 RTX_UNCHANGING_P (to_rtx) = 1;
4674 #ifdef WORD_REGISTER_OPERATIONS
4675 /* If this initializes a field that is smaller than a word, at the
4676 start of a word, try to widen it to a full word.
4677 This special case allows us to output C++ member function
4678 initializations in a form that the optimizers can understand. */
4679 if (GET_CODE (target) == REG
4680 && bitsize < BITS_PER_WORD
4681 && bitpos % BITS_PER_WORD == 0
4682 && GET_MODE_CLASS (mode) == MODE_INT
4683 && TREE_CODE (value) == INTEGER_CST
4685 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4687 tree type = TREE_TYPE (value);
4688 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4690 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4691 value = convert (type, value);
4693 if (BYTES_BIG_ENDIAN)
4695 = fold (build (LSHIFT_EXPR, type, value,
4696 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4697 bitsize = BITS_PER_WORD;
4701 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4702 TREE_VALUE (elt), type, align, cleared,
4703 (DECL_NONADDRESSABLE_P (field)
4704 && GET_CODE (to_rtx) == MEM)
4705 ? MEM_ALIAS_SET (to_rtx)
4706 : get_alias_set (TREE_TYPE (field)));
4709 else if (TREE_CODE (type) == ARRAY_TYPE)
4714 tree domain = TYPE_DOMAIN (type);
4715 tree elttype = TREE_TYPE (type);
4716 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4717 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4718 HOST_WIDE_INT minelt = 0;
4719 HOST_WIDE_INT maxelt = 0;
4721 /* If we have constant bounds for the range of the type, get them. */
4724 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4725 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4728 /* If the constructor has fewer elements than the array,
4729 clear the whole array first. Similarly if this is
4730 static constructor of a non-BLKmode object. */
4731 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4735 HOST_WIDE_INT count = 0, zero_count = 0;
4736 need_to_clear = ! const_bounds_p;
4738 /* This loop is a more accurate version of the loop in
4739 mostly_zeros_p (it handles RANGE_EXPR in an index).
4740 It is also needed to check for missing elements. */
4741 for (elt = CONSTRUCTOR_ELTS (exp);
4742 elt != NULL_TREE && ! need_to_clear;
4743 elt = TREE_CHAIN (elt))
4745 tree index = TREE_PURPOSE (elt);
4746 HOST_WIDE_INT this_node_count;
4748 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4750 tree lo_index = TREE_OPERAND (index, 0);
4751 tree hi_index = TREE_OPERAND (index, 1);
4753 if (! host_integerp (lo_index, 1)
4754 || ! host_integerp (hi_index, 1))
4760 this_node_count = (tree_low_cst (hi_index, 1)
4761 - tree_low_cst (lo_index, 1) + 1);
4764 this_node_count = 1;
4766 count += this_node_count;
4767 if (mostly_zeros_p (TREE_VALUE (elt)))
4768 zero_count += this_node_count;
4771 /* Clear the entire array first if there are any missing elements,
4772 or if the incidence of zero elements is >= 75%. */
4774 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4778 if (need_to_clear && size > 0)
4781 clear_storage (target, GEN_INT (size), align);
4784 else if (REG_P (target))
4785 /* Inform later passes that the old value is dead. */
4786 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4788 /* Store each element of the constructor into
4789 the corresponding element of TARGET, determined
4790 by counting the elements. */
4791 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4793 elt = TREE_CHAIN (elt), i++)
4795 register enum machine_mode mode;
4796 HOST_WIDE_INT bitsize;
4797 HOST_WIDE_INT bitpos;
4799 tree value = TREE_VALUE (elt);
4800 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4801 tree index = TREE_PURPOSE (elt);
4802 rtx xtarget = target;
4804 if (cleared && is_zeros_p (value))
4807 unsignedp = TREE_UNSIGNED (elttype);
4808 mode = TYPE_MODE (elttype);
4809 if (mode == BLKmode)
4810 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4811 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4814 bitsize = GET_MODE_BITSIZE (mode);
4816 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4818 tree lo_index = TREE_OPERAND (index, 0);
4819 tree hi_index = TREE_OPERAND (index, 1);
4820 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4821 struct nesting *loop;
4822 HOST_WIDE_INT lo, hi, count;
4825 /* If the range is constant and "small", unroll the loop. */
4827 && host_integerp (lo_index, 0)
4828 && host_integerp (hi_index, 0)
4829 && (lo = tree_low_cst (lo_index, 0),
4830 hi = tree_low_cst (hi_index, 0),
4831 count = hi - lo + 1,
4832 (GET_CODE (target) != MEM
4834 || (host_integerp (TYPE_SIZE (elttype), 1)
4835 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4838 lo -= minelt; hi -= minelt;
4839 for (; lo <= hi; lo++)
4841 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4842 store_constructor_field
4843 (target, bitsize, bitpos, mode, value, type, align,
4845 TYPE_NONALIASED_COMPONENT (type)
4846 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4851 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4852 loop_top = gen_label_rtx ();
4853 loop_end = gen_label_rtx ();
4855 unsignedp = TREE_UNSIGNED (domain);
4857 index = build_decl (VAR_DECL, NULL_TREE, domain);
4860 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4862 SET_DECL_RTL (index, index_r);
4863 if (TREE_CODE (value) == SAVE_EXPR
4864 && SAVE_EXPR_RTL (value) == 0)
4866 /* Make sure value gets expanded once before the
4868 expand_expr (value, const0_rtx, VOIDmode, 0);
4871 store_expr (lo_index, index_r, 0);
4872 loop = expand_start_loop (0);
4874 /* Assign value to element index. */
4876 = convert (ssizetype,
4877 fold (build (MINUS_EXPR, TREE_TYPE (index),
4878 index, TYPE_MIN_VALUE (domain))));
4879 position = size_binop (MULT_EXPR, position,
4881 TYPE_SIZE_UNIT (elttype)));
4883 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4884 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4885 xtarget = change_address (target, mode, addr);
4886 if (TREE_CODE (value) == CONSTRUCTOR)
4887 store_constructor (value, xtarget, align, cleared,
4888 bitsize / BITS_PER_UNIT);
4890 store_expr (value, xtarget, 0);
4892 expand_exit_loop_if_false (loop,
4893 build (LT_EXPR, integer_type_node,
4896 expand_increment (build (PREINCREMENT_EXPR,
4898 index, integer_one_node), 0, 0);
4900 emit_label (loop_end);
4903 else if ((index != 0 && ! host_integerp (index, 0))
4904 || ! host_integerp (TYPE_SIZE (elttype), 1))
4910 index = ssize_int (1);
4913 index = convert (ssizetype,
4914 fold (build (MINUS_EXPR, index,
4915 TYPE_MIN_VALUE (domain))));
4917 position = size_binop (MULT_EXPR, index,
4919 TYPE_SIZE_UNIT (elttype)));
4920 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4921 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4922 xtarget = change_address (target, mode, addr);
4923 store_expr (value, xtarget, 0);
4928 bitpos = ((tree_low_cst (index, 0) - minelt)
4929 * tree_low_cst (TYPE_SIZE (elttype), 1));
4931 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4933 store_constructor_field (target, bitsize, bitpos, mode, value,
4934 type, align, cleared,
4935 TYPE_NONALIASED_COMPONENT (type)
4936 && GET_CODE (target) == MEM
4937 ? MEM_ALIAS_SET (target) :
4938 get_alias_set (elttype));
4944 /* Set constructor assignments. */
4945 else if (TREE_CODE (type) == SET_TYPE)
4947 tree elt = CONSTRUCTOR_ELTS (exp);
4948 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4949 tree domain = TYPE_DOMAIN (type);
4950 tree domain_min, domain_max, bitlength;
4952 /* The default implementation strategy is to extract the constant
4953 parts of the constructor, use that to initialize the target,
4954 and then "or" in whatever non-constant ranges we need in addition.
4956 If a large set is all zero or all ones, it is
4957 probably better to set it using memset (if available) or bzero.
4958 Also, if a large set has just a single range, it may also be
4959 better to first clear the whole set (using
4960 bzero/memset), and set the bits we want. */
4962 /* Check for all zeros. */
4963 if (elt == NULL_TREE && size > 0)
4966 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4970 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4971 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4972 bitlength = size_binop (PLUS_EXPR,
4973 size_diffop (domain_max, domain_min),
4976 nbits = tree_low_cst (bitlength, 1);
4978 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4979 are "complicated" (more than one range), initialize (the
4980 constant parts) by copying from a constant. */
4981 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4982 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4984 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4985 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4986 char *bit_buffer = (char *) alloca (nbits);
4987 HOST_WIDE_INT word = 0;
4988 unsigned int bit_pos = 0;
4989 unsigned int ibit = 0;
4990 unsigned int offset = 0; /* In bytes from beginning of set. */
4992 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4995 if (bit_buffer[ibit])
4997 if (BYTES_BIG_ENDIAN)
4998 word |= (1 << (set_word_size - 1 - bit_pos));
5000 word |= 1 << bit_pos;
5004 if (bit_pos >= set_word_size || ibit == nbits)
5006 if (word != 0 || ! cleared)
5008 rtx datum = GEN_INT (word);
5011 /* The assumption here is that it is safe to use
5012 XEXP if the set is multi-word, but not if
5013 it's single-word. */
5014 if (GET_CODE (target) == MEM)
5015 to_rtx = adjust_address (target, mode, offset);
5016 else if (offset == 0)
5020 emit_move_insn (to_rtx, datum);
5027 offset += set_word_size / BITS_PER_UNIT;
5032 /* Don't bother clearing storage if the set is all ones. */
5033 if (TREE_CHAIN (elt) != NULL_TREE
5034 || (TREE_PURPOSE (elt) == NULL_TREE
5036 : ( ! host_integerp (TREE_VALUE (elt), 0)
5037 || ! host_integerp (TREE_PURPOSE (elt), 0)
5038 || (tree_low_cst (TREE_VALUE (elt), 0)
5039 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5040 != (HOST_WIDE_INT) nbits))))
5041 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5043 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5045 /* Start of range of element or NULL. */
5046 tree startbit = TREE_PURPOSE (elt);
5047 /* End of range of element, or element value. */
5048 tree endbit = TREE_VALUE (elt);
5049 #ifdef TARGET_MEM_FUNCTIONS
5050 HOST_WIDE_INT startb, endb;
5052 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5054 bitlength_rtx = expand_expr (bitlength,
5055 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5057 /* Handle non-range tuple element like [ expr ]. */
5058 if (startbit == NULL_TREE)
5060 startbit = save_expr (endbit);
5064 startbit = convert (sizetype, startbit);
5065 endbit = convert (sizetype, endbit);
5066 if (! integer_zerop (domain_min))
5068 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5069 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5071 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5072 EXPAND_CONST_ADDRESS);
5073 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5074 EXPAND_CONST_ADDRESS);
5080 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5083 emit_move_insn (targetx, target);
5086 else if (GET_CODE (target) == MEM)
5091 #ifdef TARGET_MEM_FUNCTIONS
5092 /* Optimization: If startbit and endbit are
5093 constants divisible by BITS_PER_UNIT,
5094 call memset instead. */
5095 if (TREE_CODE (startbit) == INTEGER_CST
5096 && TREE_CODE (endbit) == INTEGER_CST
5097 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5098 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5100 emit_library_call (memset_libfunc, LCT_NORMAL,
5102 plus_constant (XEXP (targetx, 0),
5103 startb / BITS_PER_UNIT),
5105 constm1_rtx, TYPE_MODE (integer_type_node),
5106 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5107 TYPE_MODE (sizetype));
5111 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5112 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5113 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5114 startbit_rtx, TYPE_MODE (sizetype),
5115 endbit_rtx, TYPE_MODE (sizetype));
5118 emit_move_insn (target, targetx);
5126 /* Store the value of EXP (an expression tree)
5127 into a subfield of TARGET which has mode MODE and occupies
5128 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5129 If MODE is VOIDmode, it means that we are storing into a bit-field.
5131 If VALUE_MODE is VOIDmode, return nothing in particular.
5132 UNSIGNEDP is not used in this case.
5134 Otherwise, return an rtx for the value stored. This rtx
5135 has mode VALUE_MODE if that is convenient to do.
5136 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5138 ALIGN is the alignment that TARGET is known to have.
5139 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5141 ALIAS_SET is the alias set for the destination. This value will
5142 (in general) be different from that for TARGET, since TARGET is a
5143 reference to the containing structure. */
/* NOTE(review): elided extraction of GCC expr.c's store_field() -- the K&R
   parameter declarations are partially missing and many interior lines
   (including several conditions and closing braces) have been dropped from
   this chunk; do not treat it as compilable.  The contract is described in
   the comment block immediately above the definition.  */
5146 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5147 unsignedp, align, total_size, alias_set)
5149 HOST_WIDE_INT bitsize;
5150 HOST_WIDE_INT bitpos;
5151 enum machine_mode mode;
5153 enum machine_mode value_mode;
5156 HOST_WIDE_INT total_size;
5159 HOST_WIDE_INT width_mask = 0;
5161 if (TREE_CODE (exp) == ERROR_MARK)
5164 /* If we have nothing to store, do nothing unless the expression has
5167 return expand_expr (exp, const0_rtx, VOIDmode, 0);
/* Mask covering the low BITSIZE bits; presumably consumed by an elided
   line when re-fetching the stored bit-field value below -- TODO confirm.  */
5169 if (bitsize < HOST_BITS_PER_WIDE_INT)
5170 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5172 /* If we are storing into an unaligned field of an aligned union that is
5173 in a register, we may have the mode of TARGET being an integer mode but
5174 MODE == BLKmode. In that case, get an aligned object whose size and
5175 alignment are the same as TARGET and store TARGET into it (we can avoid
5176 the store if the field being stored is the entire width of TARGET). Then
5177 call ourselves recursively to store the field into a BLKmode version of
5178 that object. Finally, load from the object into TARGET. This is not
5179 very efficient in general, but should only be slightly more expensive
5180 than the otherwise-required unaligned accesses. Perhaps this can be
5181 cleaned up later. */
5184 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5188 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5191 rtx blk_object = copy_rtx (object);
/* Reuse the same rtx but viewed in BLKmode so the recursive call takes
   the memory/bit-field path.  */
5193 PUT_MODE (blk_object, BLKmode);
5195 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5196 emit_move_insn (object, target);
5198 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5199 align, total_size, alias_set);
5201 /* Even though we aren't returning target, we need to
5202 give it the updated value. */
5203 emit_move_insn (target, object);
5208 if (GET_CODE (target) == CONCAT)
5210 /* We're storing into a struct containing a single __complex. */
5214 return store_expr (exp, target, 0);
5217 /* If the structure is in a register or if the component
5218 is a bit field, we cannot use addressing to access it.
5219 Use bit-field techniques or SUBREG to store in it. */
5221 if (mode == VOIDmode
5222 || (mode != BLKmode && ! direct_store[(int) mode]
5223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5224 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5225 || GET_CODE (target) == REG
5226 || GET_CODE (target) == SUBREG
5227 /* If the field isn't aligned enough to store as an ordinary memref,
5228 store it as a bit field. */
5229 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5230 && (align < GET_MODE_ALIGNMENT (mode)
5231 || bitpos % GET_MODE_ALIGNMENT (mode)))
5232 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5233 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5234 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5235 /* If the RHS and field are a constant size and the size of the
5236 RHS isn't the same size as the bitfield, we must use bitfield
5239 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5240 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5242 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5244 /* If BITSIZE is narrower than the size of the type of EXP
5245 we will be narrowing TEMP. Normally, what's wanted are the
5246 low-order bits. However, if EXP's type is a record and this is
5247 big-endian machine, we want the upper BITSIZE bits. */
5248 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5249 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5250 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5251 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5252 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5256 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5258 if (mode != VOIDmode && mode != BLKmode
5259 && mode != TYPE_MODE (TREE_TYPE (exp)))
5260 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5262 /* If the modes of TARGET and TEMP are both BLKmode, both
5263 must be in memory and BITPOS must be aligned on a byte
5264 boundary. If so, we simply do a block copy. */
5265 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5267 unsigned int exp_align = expr_align (exp);
5269 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5270 || bitpos % BITS_PER_UNIT != 0)
5273 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5275 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5276 align = MIN (exp_align, align);
5278 /* Find an alignment that is consistent with the bit position. */
5279 while ((bitpos % align) != 0)
/* Copy a whole number of bytes; -1 bitsize means "whole object",
   otherwise round the bit count up to bytes.  */
5282 emit_block_move (target, temp,
5283 bitsize == -1 ? expr_size (exp)
5284 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5288 return value_mode == VOIDmode ? const0_rtx : target;
5291 /* Store the value in the bitfield. */
5292 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5293 if (value_mode != VOIDmode)
5295 /* The caller wants an rtx for the value. */
5296 /* If possible, avoid refetching from the bitfield itself. */
5298 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5301 enum machine_mode tmode;
5304 return expand_and (temp,
5308 GET_MODE (temp) == VOIDmode
5310 : GET_MODE (temp))), NULL_RTX);
/* Signed case: sign-extend the low BITSIZE bits of TEMP by a
   left/right shift pair in its own mode.  */
5311 tmode = GET_MODE (temp);
5312 if (tmode == VOIDmode)
5314 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5315 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5316 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Fallback: re-extract the just-stored field from TARGET.  */
5318 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5319 NULL_RTX, value_mode, 0, align,
/* Ordinary addressable memory store: build a MEM for just the field.  */
5326 rtx addr = XEXP (target, 0);
5329 /* If a value is wanted, it must be the lhs;
5330 so make the address stable for multiple use. */
5332 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5333 && ! CONSTANT_ADDRESS_P (addr)
5334 /* A frame-pointer reference is already stable. */
5335 && ! (GET_CODE (addr) == PLUS
5336 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5337 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5338 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5339 target = replace_equiv_address (target, copy_to_reg (addr));
5341 /* Now build a reference to just the desired component. */
5343 to_rtx = copy_rtx (adjust_address (target, mode,
5344 bitpos / BITS_PER_UNIT));
5346 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5347 /* If the address of the structure varies, then it might be on
5348 the stack. And, stack slots may be shared across scopes.
5349 So, two different structures, of different types, can end up
5350 at the same location. We will give the structures alias set
5351 zero; here we must be careful not to give non-zero alias sets
5353 set_mem_alias_set (to_rtx,
5354 rtx_varies_p (addr, /*for_alias=*/0)
5357 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5361 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5362 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5363 codes and find the ultimate containing object, which we return.
5365 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5366 bit position, and *PUNSIGNEDP to the signedness of the field.
5367 If the position of the field is variable, we store a tree
5368 giving the variable offset (in units) in *POFFSET.
5369 This offset is in addition to the bit position.
5370 If the position is not variable, we store 0 in *POFFSET.
5371 We set *PALIGNMENT to the alignment of the address that will be
5372 computed. This is the alignment of the thing we return if *POFFSET
5373 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5375 If any of the extraction expressions is volatile,
5376 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5378 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5379 is a mode that can be used to access the field. In that case, *PBITSIZE
5382 If the field describes a variable-sized object, *PMODE is set to
5383 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5384 this case, but the address of the object can be found. */
/* NOTE(review): elided extraction of GCC expr.c's get_inner_reference() --
   several parameter declarations, local declarations (e.g. size_tree, tem)
   and interior lines are missing from this chunk; not compilable as shown.
   The output-parameter contract is documented in the comment block above.  */
5387 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5388 punsignedp, pvolatilep, palignment)
5390 HOST_WIDE_INT *pbitsize;
5391 HOST_WIDE_INT *pbitpos;
5393 enum machine_mode *pmode;
5396 unsigned int *palignment;
5399 enum machine_mode mode = VOIDmode;
/* Byte offset (tree, may be variable) and bit offset accumulated while
   peeling component/array references off EXP.  */
5400 tree offset = size_zero_node;
5401 tree bit_offset = bitsize_zero_node;
5402 unsigned int alignment = BIGGEST_ALIGNMENT;
5405 /* First get the mode, signedness, and size. We do this from just the
5406 outermost expression. */
5407 if (TREE_CODE (exp) == COMPONENT_REF)
5409 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5410 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5411 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5413 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5415 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5417 size_tree = TREE_OPERAND (exp, 1);
5418 *punsignedp = TREE_UNSIGNED (exp);
5422 mode = TYPE_MODE (TREE_TYPE (exp));
5423 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5425 if (mode == BLKmode)
5426 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5428 *pbitsize = GET_MODE_BITSIZE (mode);
/* A non-constant size means a variable-sized field: report BLKmode
   and bitsize -1 per the contract above.  */
5433 if (! host_integerp (size_tree, 1))
5434 mode = BLKmode, *pbitsize = -1;
5436 *pbitsize = tree_low_cst (size_tree, 1);
5439 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5440 and find the ultimate containing object. */
5443 if (TREE_CODE (exp) == BIT_FIELD_REF)
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5445 else if (TREE_CODE (exp) == COMPONENT_REF)
5447 tree field = TREE_OPERAND (exp, 1);
5448 tree this_offset = DECL_FIELD_OFFSET (field);
5450 /* If this field hasn't been filled in yet, don't go
5451 past it. This should only happen when folding expressions
5452 made during type construction. */
5453 if (this_offset == 0)
5455 else if (! TREE_CONSTANT (this_offset)
5456 && contains_placeholder_p (this_offset))
5457 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5459 offset = size_binop (PLUS_EXPR, offset, this_offset);
5460 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5461 DECL_FIELD_BIT_OFFSET (field));
/* With a variable offset the best alignment guarantee we have is the
   field's declared offset alignment.  */
5463 if (! host_integerp (offset, 0))
5464 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5467 else if (TREE_CODE (exp) == ARRAY_REF
5468 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5470 tree index = TREE_OPERAND (exp, 1);
5471 tree array = TREE_OPERAND (exp, 0);
5472 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5473 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5474 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5476 /* We assume all arrays have sizes that are a multiple of a byte.
5477 First subtract the lower bound, if any, in the type of the
5478 index, then convert to sizetype and multiply by the size of the
5480 if (low_bound != 0 && ! integer_zerop (low_bound))
5481 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5484 /* If the index has a self-referential type, pass it to a
5485 WITH_RECORD_EXPR; if the component size is, pass our
5486 component to one. */
5487 if (! TREE_CONSTANT (index)
5488 && contains_placeholder_p (index))
5489 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5490 if (! TREE_CONSTANT (unit_size)
5491 && contains_placeholder_p (unit_size))
5492 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5494 offset = size_binop (PLUS_EXPR, offset,
5495 size_binop (MULT_EXPR,
5496 convert (sizetype, index),
/* Stop peeling at anything that is not a reference or a
   mode-preserving conversion.  */
5500 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5501 && ! ((TREE_CODE (exp) == NOP_EXPR
5502 || TREE_CODE (exp) == CONVERT_EXPR)
5503 && (TYPE_MODE (TREE_TYPE (exp))
5504 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5507 /* If any reference in the chain is volatile, the effect is volatile. */
5508 if (TREE_THIS_VOLATILE (exp))
5511 /* If the offset is non-constant already, then we can't assume any
5512 alignment more than the alignment here. */
5513 if (! TREE_CONSTANT (offset))
5514 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)))
5516 exp = TREE_OPERAND (exp, 0);
5520 alignment = MIN (alignment, DECL_ALIGN (exp));
5521 else if (TREE_TYPE (exp) != 0)
5522 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5524 /* If OFFSET is constant, see if we can return the whole thing as a
5525 constant bit position. Otherwise, split it up. */
5526 if (host_integerp (offset, 0)
5527 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5529 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5530 && host_integerp (tem, 0))
5531 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5533 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5536 *palignment = alignment;
5540 /* Subroutine of expand_exp: compute memory_usage from modifier. */
/* Map an expand_modifier onto the memory_use_mode recorded for memory
   checking.  NOTE(review): the switch head, the EXPAND_MEMORY_USE_RO case
   label and the default/closing lines are elided from this chunk.  */
5542 static enum memory_use_mode
5543 get_memory_usage_from_modifier (modifier)
5544 enum expand_modifier modifier;
5550 return MEMORY_USE_RO;
5552 case EXPAND_MEMORY_USE_WO:
5553 return MEMORY_USE_WO;
5555 case EXPAND_MEMORY_USE_RW:
5556 return MEMORY_USE_RW;
5558 case EXPAND_MEMORY_USE_DONT:
5559 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5560 MEMORY_USE_DONT, because they are modifiers to a call of
5561 expand_expr in the ADDR_EXPR case of expand_expr. */
5562 case EXPAND_CONST_ADDRESS:
5563 case EXPAND_INITIALIZER:
5564 return MEMORY_USE_DONT;
5565 case EXPAND_MEMORY_USE_BAD:
5571 /* Given an rtx VALUE that may contain additions and multiplications, return
5572 an equivalent value that just refers to a register, memory, or constant.
5573 This is done by generating instructions to perform the arithmetic and
5574 returning a pseudo-register containing the value.
5576 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): elided extraction of GCC expr.c's force_operand() -- the
   parameter declarations, several local declarations (tmp, op2) and the
   terminal "return value"/fallthrough lines are missing from this chunk.
   Contract is in the comment block above: reduce VALUE (which may contain
   PLUS/MINUS/MULT) to a REG, SUBREG, MEM or constant by emitting insns.  */
5579 force_operand (value, target)
5582 register optab binoptab = 0;
5583 /* Use a temporary to force order of execution of calls to
5587 /* Use subtarget as the target for operand 0 of a binary operation. */
5588 register rtx subtarget = get_subtarget (target);
5590 /* Check for a PIC address load. */
5592 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5593 && XEXP (value, 0) == pic_offset_table_rtx
5594 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5595 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5596 || GET_CODE (XEXP (value, 1)) == CONST))
5599 subtarget = gen_reg_rtx (GET_MODE (value));
5600 emit_move_insn (subtarget, value);
/* Select the optab for the outermost operation.  */
5604 if (GET_CODE (value) == PLUS)
5605 binoptab = add_optab;
5606 else if (GET_CODE (value) == MINUS)
5607 binoptab = sub_optab;
5608 else if (GET_CODE (value) == MULT)
5610 op2 = XEXP (value, 1);
5611 if (!CONSTANT_P (op2)
5612 && !(GET_CODE (op2) == REG && op2 != subtarget))
5614 tmp = force_operand (XEXP (value, 0), subtarget);
5615 return expand_mult (GET_MODE (value), tmp,
5616 force_operand (op2, NULL_RTX),
5622 op2 = XEXP (value, 1);
5623 if (!CONSTANT_P (op2)
5624 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize "x - const" into "x + (-const)".  */
5626 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5628 binoptab = add_optab;
5629 op2 = negate_rtx (GET_MODE (value), op2);
5632 /* Check for an addition with OP2 a constant integer and our first
5633 operand a PLUS of a virtual register and something else. In that
5634 case, we want to emit the sum of the virtual register and the
5635 constant first and then add the other value. This allows virtual
5636 register instantiation to simply modify the constant rather than
5637 creating another one around this addition. */
5638 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5639 && GET_CODE (XEXP (value, 0)) == PLUS
5640 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5641 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5642 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5644 rtx temp = expand_binop (GET_MODE (value), binoptab,
5645 XEXP (XEXP (value, 0), 0), op2,
5646 subtarget, 0, OPTAB_LIB_WIDEN);
5647 return expand_binop (GET_MODE (value), binoptab, temp,
5648 force_operand (XEXP (XEXP (value, 0), 1), 0),
5649 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: reduce both operands recursively, then emit.  */
5652 tmp = force_operand (XEXP (value, 0), subtarget);
5653 return expand_binop (GET_MODE (value), binoptab, tmp,
5654 force_operand (op2, NULL_RTX),
5655 target, 0, OPTAB_LIB_WIDEN);
5656 /* We give UNSIGNEDP = 0 to expand_binop
5657 because the only operations we are expanding here are signed ones. */
5662 /* Subroutine of expand_expr:
5663 save the non-copied parts (LIST) of an expr (LHS), and return a list
5664 which can restore these values to their previous values,
5665 should something modify their storage. */
/* NOTE(review): elided extraction -- parameter/local declarations and the
   final "return parts;" are missing.  Per the comment block above: save the
   non-copied parts LIST of LHS into const temporaries and return a list
   (RTL_EXPRs) that can restore their previous values.  */
5668 save_noncopied_parts (lhs, list)
5675 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LIST entries are flattened recursively.  */
5676 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5677 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5680 tree part = TREE_VALUE (tail);
5681 tree part_type = TREE_TYPE (part);
5682 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
/* Allocate a const-qualified temporary to hold the saved value.  */
5684 = assign_temp (build_qualified_type (part_type,
5685 (TYPE_QUALS (part_type)
5686 | TYPE_QUAL_CONST)),
5689 parts = tree_cons (to_be_saved,
5690 build (RTL_EXPR, part_type, NULL_TREE,
5691 (tree) validize_mem (target)),
/* Copy the current value of the component into the temporary now.  */
5693 store_expr (TREE_PURPOSE (parts),
5694 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5699 /* Subroutine of expand_expr:
5700 record the non-copied parts (LIST) of an expr (LHS), and return a list
5701 which specifies the initial values of these parts. */
/* NOTE(review): elided extraction -- parameter/local declarations and the
   final "return parts;" are missing.  Per the comment block above: build a
   list pairing each non-copied part's initial value (TREE_PURPOSE) with a
   COMPONENT_REF into LHS to be initialized.  */
5704 init_noncopied_parts (lhs, list)
5711 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LIST entries are flattened recursively; entries with no
   TREE_PURPOSE (no initial value) are skipped.  */
5712 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5713 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5714 else if (TREE_PURPOSE (tail))
5716 tree part = TREE_VALUE (tail);
5717 tree part_type = TREE_TYPE (part);
5718 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5719 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5724 /* Subroutine of expand_expr: return nonzero iff there is no way that
5725 EXP can reference X, which is being modified. TOP_P is nonzero if this
5726 call is going to be used to determine whether we need a temporary
5727 for EXP, as opposed to a recursive call to this function.
5729 It is always safe for this routine to return zero since it merely
5730 searches for optimization opportunities. */
/* NOTE(review): elided extraction of GCC expr.c's safe_from_p() -- the
   parameter declarations, several local declarations (exp_rtl, nops, i, t,
   rtn), many case labels and closing braces are missing from this chunk.
   Contract (comment block above): return nonzero iff there is no way EXP
   can reference X, which is being modified; returning zero is always safe.  */
5733 safe_from_p (x, exp, top_p)
/* List of SAVE_EXPRs marked TREE_PRIVATE during this top-level scan,
   so the marks can be cleared afterwards (see the long comment below).  */
5740 static tree save_expr_list;
5743 /* If EXP has varying size, we MUST use a target since we currently
5744 have no way of allocating temporaries of variable size
5745 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5746 So we assume here that something at a higher level has prevented a
5747 clash. This is somewhat bogus, but the best we can do. Only
5748 do this when X is BLKmode and when we are at the top level. */
5749 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5750 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5751 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5752 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5753 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5755 && GET_MODE (x) == BLKmode)
5756 /* If X is in the outgoing argument area, it is always safe. */
5757 || (GET_CODE (x) == MEM
5758 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5759 || (GET_CODE (XEXP (x, 0)) == PLUS
5760 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5763 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5764 find the underlying pseudo. */
5765 if (GET_CODE (x) == SUBREG)
5768 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5772 /* A SAVE_EXPR might appear many times in the expression passed to the
5773 top-level safe_from_p call, and if it has a complex subexpression,
5774 examining it multiple times could result in a combinatorial explosion.
5775 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5776 with optimization took about 28 minutes to compile -- even though it was
5777 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5778 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5779 we have processed. Note that the only test of top_p was above. */
5788 rtn = safe_from_p (x, exp, 0);
/* Clear the TREE_PRIVATE marks set on SAVE_EXPRs during the scan.  */
5790 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5791 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5796 /* Now look at our tree code and possibly recurse. */
5797 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5800 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5807 if (TREE_CODE (exp) == TREE_LIST)
5808 return ((TREE_VALUE (exp) == 0
5809 || safe_from_p (x, TREE_VALUE (exp), 0))
5810 && (TREE_CHAIN (exp) == 0
5811 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5812 else if (TREE_CODE (exp) == ERROR_MARK)
5813 return 1; /* An already-visited SAVE_EXPR? */
5818 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5822 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5823 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5827 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5828 the expression. If it is set, we conflict iff we are that rtx or
5829 both are in memory. Otherwise, we check all operands of the
5830 expression recursively. */
5832 switch (TREE_CODE (exp))
5835 return (staticp (TREE_OPERAND (exp, 0))
5836 || TREE_STATIC (exp)
5837 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5840 if (GET_CODE (x) == MEM
5841 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5842 get_alias_set (exp)))
5847 /* Assume that the call will clobber all hard registers and
5849 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5850 || GET_CODE (x) == MEM)
5855 /* If a sequence exists, we would have to scan every instruction
5856 in the sequence to see if it was safe. This is probably not
5858 if (RTL_EXPR_SEQUENCE (exp))
5861 exp_rtl = RTL_EXPR_RTL (exp);
5864 case WITH_CLEANUP_EXPR:
5865 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5868 case CLEANUP_POINT_EXPR:
5869 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5872 exp_rtl = SAVE_EXPR_RTL (exp);
5876 /* If we've already scanned this, don't do it again. Otherwise,
5877 show we've scanned it and record for clearing the flag if we're
5879 if (TREE_PRIVATE (exp))
/* Mark before recursing so a cyclic/shared SAVE_EXPR is not rescanned.  */
5882 TREE_PRIVATE (exp) = 1;
5883 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5885 TREE_PRIVATE (exp) = 0;
5889 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5893 /* The only operand we look at is operand 1. The rest aren't
5894 part of the expression. */
5895 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5897 case METHOD_CALL_EXPR:
5898 /* This takes a rtx argument, but shouldn't appear here. */
5905 /* If we have an rtx, we do not need to scan our operands. */
5909 nops = first_rtl_op (TREE_CODE (exp));
5910 for (i = 0; i < nops; i++)
5911 if (TREE_OPERAND (exp, i) != 0
5912 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5915 /* If this is a language-specific tree code, it may require
5916 special handling. */
5917 if ((unsigned int) TREE_CODE (exp)
5918 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5920 && !(*lang_safe_from_p) (x, exp))
5924 /* If we have an rtl, find any enclosed object. Then see if we conflict
5928 if (GET_CODE (exp_rtl) == SUBREG)
5930 exp_rtl = SUBREG_REG (exp_rtl);
5931 if (GET_CODE (exp_rtl) == REG
5932 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5936 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5937 are memory and they conflict. */
5938 return ! (rtx_equal_p (x, exp_rtl)
5939 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5940 && true_dependence (exp_rtl, GET_MODE (x), x,
5941 rtx_addr_varies_p)));
5944 /* If we reach here, it is safe. */
5948 /* Subroutine of expand_expr: return nonzero iff EXP is an
5949 expression whose type is statically determinable.  */
/* NOTE(review): the signature, braces, and return statements of this
   predicate are elided in this excerpt; only the classifying condition
   is visible.  The tree codes listed below are the ones treated as
   having a statically determinable type -- presumably the condition
   guards a nonzero return; confirm against the complete source.  */
5955 if (TREE_CODE (exp) == PARM_DECL
5956 || TREE_CODE (exp) == VAR_DECL
5957 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5958 || TREE_CODE (exp) == COMPONENT_REF
5959 || TREE_CODE (exp) == ARRAY_REF)
5964 /* Subroutine of expand_expr: return rtx if EXP is a
5965 variable or parameter; else return 0.  */
/* NOTE(review): the function signature and the switch's case labels are
   elided in this excerpt.  The visible arm returns the DECL_RTL of a
   declaration node -- presumably reached for VAR_DECL / PARM_DECL;
   verify the exact labels (and the default returning 0) against the
   complete source.  */
5972 switch (TREE_CODE (exp))
5976 return DECL_RTL (exp);
5982 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Sanity-check EXP: call internal_error() if the expression's result,
   or any operand of a unary, binary, or comparison operation, has an
   integer mode wider than the target's MAX_INTEGER_COMPUTATION_MODE.
   Only compiled on targets that define MAX_INTEGER_COMPUTATION_MODE.
   NOTE(review): the parameter declaration, braces, and several
   statements (e.g. the NOP-stripping implied by the comment below and
   the early returns) are elided in this excerpt; do not assume exact
   control flow from what is visible here.  */
5985 check_max_integer_computation_mode (exp)
5988 enum tree_code code;
5989 enum machine_mode mode;
5991 /* Strip any NOPs that don't change the mode. */
5993 code = TREE_CODE (exp);
5995 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
/* NOTE(review): the early return for this constant-conversion case is
   not visible in this excerpt; presumably the if below guards it.  */
5996 if (code == NOP_EXPR
5997 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6000 /* First check the type of the overall operation. We need only look at
6001 unary, binary and relational operations. */
6002 if (TREE_CODE_CLASS (code) == '1'
6003 || TREE_CODE_CLASS (code) == '2'
6004 || TREE_CODE_CLASS (code) == '<')
/* The result mode itself must not be an integer mode wider than the
   target maximum.  */
6006 mode = TYPE_MODE (TREE_TYPE (exp));
6007 if (GET_MODE_CLASS (mode) == MODE_INT
6008 && mode > MAX_INTEGER_COMPUTATION_MODE)
6009 internal_error ("unsupported wide integer operation");
6012 /* Check operand of a unary op. */
6013 if (TREE_CODE_CLASS (code) == '1')
6015 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6016 if (GET_MODE_CLASS (mode) == MODE_INT
6017 && mode > MAX_INTEGER_COMPUTATION_MODE)
6018 internal_error ("unsupported wide integer operation");
6021 /* Check operands of a binary/comparison op. */
6022 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
/* First operand.  */
6024 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6025 if (GET_MODE_CLASS (mode) == MODE_INT
6026 && mode > MAX_INTEGER_COMPUTATION_MODE)
6027 internal_error ("unsupported wide integer operation");
/* Second operand gets the same check.  */
6029 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6030 if (GET_MODE_CLASS (mode) == MODE_INT
6031 && mode > MAX_INTEGER_COMPUTATION_MODE)
6032 internal_error ("unsupported wide integer operation");
6037 /* expand_expr: generate code for computing expression EXP.
6038 An rtx for the computed value is returned. The value is never null.
6039 In the case of a void EXP, const0_rtx is returned.
6041 The value may be stored in TARGET if TARGET is nonzero.
6042 TARGET is just a suggestion; callers must assume that
6043 the rtx returned may not be the same as TARGET.
6045 If TARGET is CONST0_RTX, it means that the value will be ignored.
6047 If TMODE is not VOIDmode, it suggests generating the
6048 result in mode TMODE. But this is done only when convenient.
6049 Otherwise, TMODE is ignored and the value generated in its natural mode.
6050 TMODE is just a suggestion; callers must assume that
6051 the rtx returned may not have mode TMODE.
6053 Note that TARGET may have neither TMODE nor MODE. In that case, it
6054 probably will not be used.
6056 If MODIFIER is EXPAND_SUM then when EXP is an addition
6057 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6058 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6059 products as above, or REG or MEM, or constant.
6060 Ordinarily in such cases we would output mul or add instructions
6061 and then return a pseudo reg containing the sum.
6063 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6064 it also marks a label as absolutely required (it can't be dead).
6065 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6066 This is used for outputting expressions used in initializers.
6068 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6069 with a constant address even if that address is not normally legitimate.
6070 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6073 expand_expr (exp, target, tmode, modifier)
6076 enum machine_mode tmode;
6077 enum expand_modifier modifier;
6079 register rtx op0, op1, temp;
6080 tree type = TREE_TYPE (exp);
6081 int unsignedp = TREE_UNSIGNED (type);
6082 register enum machine_mode mode;
6083 register enum tree_code code = TREE_CODE (exp);
6085 rtx subtarget, original_target;
6088 /* Used by check-memory-usage to make modifier read only. */
6089 enum expand_modifier ro_modifier;
6091 /* Handle ERROR_MARK before anybody tries to access its type. */
6092 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6094 op0 = CONST0_RTX (tmode);
6100 mode = TYPE_MODE (type);
6101 /* Use subtarget as the target for operand 0 of a binary operation. */
6102 subtarget = get_subtarget (target);
6103 original_target = target;
6104 ignore = (target == const0_rtx
6105 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6106 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6107 || code == COND_EXPR)
6108 && TREE_CODE (type) == VOID_TYPE));
6110 /* Make a read-only version of the modifier. */
6111 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6112 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6113 ro_modifier = modifier;
6115 ro_modifier = EXPAND_NORMAL;
6117 /* If we are going to ignore this result, we need only do something
6118 if there is a side-effect somewhere in the expression. If there
6119 is, short-circuit the most common cases here. Note that we must
6120 not call expand_expr with anything but const0_rtx in case this
6121 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6125 if (! TREE_SIDE_EFFECTS (exp))
6128 /* Ensure we reference a volatile object even if value is ignored, but
6129 don't do this if all we are doing is taking its address. */
6130 if (TREE_THIS_VOLATILE (exp)
6131 && TREE_CODE (exp) != FUNCTION_DECL
6132 && mode != VOIDmode && mode != BLKmode
6133 && modifier != EXPAND_CONST_ADDRESS)
6135 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6136 if (GET_CODE (temp) == MEM)
6137 temp = copy_to_reg (temp);
6141 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6142 || code == INDIRECT_REF || code == BUFFER_REF)
6143 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6144 VOIDmode, ro_modifier);
6145 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6146 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6148 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6150 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6154 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6155 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6156 /* If the second operand has no side effects, just evaluate
6158 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6159 VOIDmode, ro_modifier);
6160 else if (code == BIT_FIELD_REF)
6162 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6164 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6166 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6174 #ifdef MAX_INTEGER_COMPUTATION_MODE
6175 /* Only check stuff here if the mode we want is different from the mode
6176 of the expression; if it's the same, check_max_integer_computation_mode
6177 will handle it. Do we really need to check this stuff at all? */
6180 && GET_MODE (target) != mode
6181 && TREE_CODE (exp) != INTEGER_CST
6182 && TREE_CODE (exp) != PARM_DECL
6183 && TREE_CODE (exp) != ARRAY_REF
6184 && TREE_CODE (exp) != ARRAY_RANGE_REF
6185 && TREE_CODE (exp) != COMPONENT_REF
6186 && TREE_CODE (exp) != BIT_FIELD_REF
6187 && TREE_CODE (exp) != INDIRECT_REF
6188 && TREE_CODE (exp) != CALL_EXPR
6189 && TREE_CODE (exp) != VAR_DECL
6190 && TREE_CODE (exp) != RTL_EXPR)
6192 enum machine_mode mode = GET_MODE (target);
6194 if (GET_MODE_CLASS (mode) == MODE_INT
6195 && mode > MAX_INTEGER_COMPUTATION_MODE)
6196 internal_error ("unsupported wide integer operation");
6200 && TREE_CODE (exp) != INTEGER_CST
6201 && TREE_CODE (exp) != PARM_DECL
6202 && TREE_CODE (exp) != ARRAY_REF
6203 && TREE_CODE (exp) != ARRAY_RANGE_REF
6204 && TREE_CODE (exp) != COMPONENT_REF
6205 && TREE_CODE (exp) != BIT_FIELD_REF
6206 && TREE_CODE (exp) != INDIRECT_REF
6207 && TREE_CODE (exp) != VAR_DECL
6208 && TREE_CODE (exp) != CALL_EXPR
6209 && TREE_CODE (exp) != RTL_EXPR
6210 && GET_MODE_CLASS (tmode) == MODE_INT
6211 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6212 internal_error ("unsupported wide integer operation");
6214 check_max_integer_computation_mode (exp);
6217 /* If will do cse, generate all results into pseudo registers
6218 since 1) that allows cse to find more things
6219 and 2) otherwise cse could produce an insn the machine
6222 if (! cse_not_expected && mode != BLKmode && target
6223 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6230 tree function = decl_function_context (exp);
6231 /* Handle using a label in a containing function. */
6232 if (function != current_function_decl
6233 && function != inline_function_decl && function != 0)
6235 struct function *p = find_function_data (function);
6236 p->expr->x_forced_labels
6237 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6238 p->expr->x_forced_labels);
6242 if (modifier == EXPAND_INITIALIZER)
6243 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6248 temp = gen_rtx_MEM (FUNCTION_MODE,
6249 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6250 if (function != current_function_decl
6251 && function != inline_function_decl && function != 0)
6252 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6257 if (DECL_RTL (exp) == 0)
6259 error_with_decl (exp, "prior parameter's size depends on `%s'");
6260 return CONST0_RTX (mode);
6263 /* ... fall through ... */
6266 /* If a static var's type was incomplete when the decl was written,
6267 but the type is complete now, lay out the decl now. */
6268 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6269 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6271 layout_decl (exp, 0);
6272 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6275 /* Although static-storage variables start off initialized, according to
6276 ANSI C, a memcpy could overwrite them with uninitialized values. So
6277 we check them too. This also lets us check for read-only variables
6278 accessed via a non-const declaration, in case it won't be detected
6279 any other way (e.g., in an embedded system or OS kernel without
6282 Aggregates are not checked here; they're handled elsewhere. */
6283 if (cfun && current_function_check_memory_usage
6285 && GET_CODE (DECL_RTL (exp)) == MEM
6286 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6288 enum memory_use_mode memory_usage;
6289 memory_usage = get_memory_usage_from_modifier (modifier);
6291 in_check_memory_usage = 1;
6292 if (memory_usage != MEMORY_USE_DONT)
6293 emit_library_call (chkr_check_addr_libfunc,
6294 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6295 XEXP (DECL_RTL (exp), 0), Pmode,
6296 GEN_INT (int_size_in_bytes (type)),
6297 TYPE_MODE (sizetype),
6298 GEN_INT (memory_usage),
6299 TYPE_MODE (integer_type_node));
6300 in_check_memory_usage = 0;
6303 /* ... fall through ... */
6307 if (DECL_RTL (exp) == 0)
6310 /* Ensure variable marked as used even if it doesn't go through
6311 a parser. If it hasn't be used yet, write out an external
6313 if (! TREE_USED (exp))
6315 assemble_external (exp);
6316 TREE_USED (exp) = 1;
6319 /* Show we haven't gotten RTL for this yet. */
6322 /* Handle variables inherited from containing functions. */
6323 context = decl_function_context (exp);
6325 /* We treat inline_function_decl as an alias for the current function
6326 because that is the inline function whose vars, types, etc.
6327 are being merged into the current function.
6328 See expand_inline_function. */
6330 if (context != 0 && context != current_function_decl
6331 && context != inline_function_decl
6332 /* If var is static, we don't need a static chain to access it. */
6333 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6334 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6338 /* Mark as non-local and addressable. */
6339 DECL_NONLOCAL (exp) = 1;
6340 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6342 mark_addressable (exp);
6343 if (GET_CODE (DECL_RTL (exp)) != MEM)
6345 addr = XEXP (DECL_RTL (exp), 0);
6346 if (GET_CODE (addr) == MEM)
6348 = replace_equiv_address (addr,
6349 fix_lexical_addr (XEXP (addr, 0), exp));
6351 addr = fix_lexical_addr (addr, exp);
6353 temp = replace_equiv_address (DECL_RTL (exp), addr);
6356 /* This is the case of an array whose size is to be determined
6357 from its initializer, while the initializer is still being parsed.
6360 else if (GET_CODE (DECL_RTL (exp)) == MEM
6361 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6362 temp = validize_mem (DECL_RTL (exp));
6364 /* If DECL_RTL is memory, we are in the normal case and either
6365 the address is not valid or it is not a register and -fforce-addr
6366 is specified, get the address into a register. */
6368 else if (GET_CODE (DECL_RTL (exp)) == MEM
6369 && modifier != EXPAND_CONST_ADDRESS
6370 && modifier != EXPAND_SUM
6371 && modifier != EXPAND_INITIALIZER
6372 && (! memory_address_p (DECL_MODE (exp),
6373 XEXP (DECL_RTL (exp), 0))
6375 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6376 temp = replace_equiv_address (DECL_RTL (exp),
6377 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6379 /* If we got something, return it. But first, set the alignment
6380 if the address is a register. */
6383 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6384 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6389 /* If the mode of DECL_RTL does not match that of the decl, it
6390 must be a promoted value. We return a SUBREG of the wanted mode,
6391 but mark it so that we know that it was already extended. */
6393 if (GET_CODE (DECL_RTL (exp)) == REG
6394 && GET_MODE (DECL_RTL (exp)) != mode)
6396 /* Get the signedness used for this variable. Ensure we get the
6397 same mode we got when the variable was declared. */
6398 if (GET_MODE (DECL_RTL (exp))
6399 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6402 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6403 SUBREG_PROMOTED_VAR_P (temp) = 1;
6404 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6408 return DECL_RTL (exp);
6411 return immed_double_const (TREE_INT_CST_LOW (exp),
6412 TREE_INT_CST_HIGH (exp), mode);
6415 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6416 EXPAND_MEMORY_USE_BAD);
6419 /* If optimized, generate immediate CONST_DOUBLE
6420 which will be turned into memory by reload if necessary.
6422 We used to force a register so that loop.c could see it. But
6423 this does not allow gen_* patterns to perform optimizations with
6424 the constants. It also produces two insns in cases like "x = 1.0;".
6425 On most machines, floating-point constants are not permitted in
6426 many insns, so we'd end up copying it to a register in any case.
6428 Now, we do the copying in expand_binop, if appropriate. */
6429 return immed_real_const (exp);
6433 if (! TREE_CST_RTL (exp))
6434 output_constant_def (exp, 1);
6436 /* TREE_CST_RTL probably contains a constant address.
6437 On RISC machines where a constant address isn't valid,
6438 make some insns to get that address into a register. */
6439 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6440 && modifier != EXPAND_CONST_ADDRESS
6441 && modifier != EXPAND_INITIALIZER
6442 && modifier != EXPAND_SUM
6443 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6445 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6446 return replace_equiv_address (TREE_CST_RTL (exp),
6447 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6448 return TREE_CST_RTL (exp);
6450 case EXPR_WITH_FILE_LOCATION:
6453 const char *saved_input_filename = input_filename;
6454 int saved_lineno = lineno;
6455 input_filename = EXPR_WFL_FILENAME (exp);
6456 lineno = EXPR_WFL_LINENO (exp);
6457 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6458 emit_line_note (input_filename, lineno);
6459 /* Possibly avoid switching back and forth here. */
6460 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6461 input_filename = saved_input_filename;
6462 lineno = saved_lineno;
6467 context = decl_function_context (exp);
6469 /* If this SAVE_EXPR was at global context, assume we are an
6470 initialization function and move it into our context. */
6472 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6474 /* We treat inline_function_decl as an alias for the current function
6475 because that is the inline function whose vars, types, etc.
6476 are being merged into the current function.
6477 See expand_inline_function. */
6478 if (context == current_function_decl || context == inline_function_decl)
6481 /* If this is non-local, handle it. */
6484 /* The following call just exists to abort if the context is
6485 not of a containing function. */
6486 find_function_data (context);
6488 temp = SAVE_EXPR_RTL (exp);
6489 if (temp && GET_CODE (temp) == REG)
6491 put_var_into_stack (exp);
6492 temp = SAVE_EXPR_RTL (exp);
6494 if (temp == 0 || GET_CODE (temp) != MEM)
6497 replace_equiv_address (temp,
6498 fix_lexical_addr (XEXP (temp, 0), exp));
6500 if (SAVE_EXPR_RTL (exp) == 0)
6502 if (mode == VOIDmode)
6505 temp = assign_temp (build_qualified_type (type,
6507 | TYPE_QUAL_CONST)),
6510 SAVE_EXPR_RTL (exp) = temp;
6511 if (!optimize && GET_CODE (temp) == REG)
6512 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6515 /* If the mode of TEMP does not match that of the expression, it
6516 must be a promoted value. We pass store_expr a SUBREG of the
6517 wanted mode but mark it so that we know that it was already
6518 extended. Note that `unsignedp' was modified above in
6521 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6523 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6524 SUBREG_PROMOTED_VAR_P (temp) = 1;
6525 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6528 if (temp == const0_rtx)
6529 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6530 EXPAND_MEMORY_USE_BAD);
6532 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6534 TREE_USED (exp) = 1;
6537 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6538 must be a promoted value. We return a SUBREG of the wanted mode,
6539 but mark it so that we know that it was already extended. */
6541 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6542 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6544 /* Compute the signedness and make the proper SUBREG. */
6545 promote_mode (type, mode, &unsignedp, 0);
6546 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6547 SUBREG_PROMOTED_VAR_P (temp) = 1;
6548 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6552 return SAVE_EXPR_RTL (exp);
6557 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6558 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6562 case PLACEHOLDER_EXPR:
6564 tree placeholder_expr;
6566 /* If there is an object on the head of the placeholder list,
6567 see if some object in it of type TYPE or a pointer to it. For
6568 further information, see tree.def. */
6569 for (placeholder_expr = placeholder_list;
6570 placeholder_expr != 0;
6571 placeholder_expr = TREE_CHAIN (placeholder_expr))
6573 tree need_type = TYPE_MAIN_VARIANT (type);
6575 tree old_list = placeholder_list;
6578 /* Find the outermost reference that is of the type we want.
6579 If none, see if any object has a type that is a pointer to
6580 the type we want. */
6581 for (elt = TREE_PURPOSE (placeholder_expr);
6582 elt != 0 && object == 0;
6584 = ((TREE_CODE (elt) == COMPOUND_EXPR
6585 || TREE_CODE (elt) == COND_EXPR)
6586 ? TREE_OPERAND (elt, 1)
6587 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6589 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6590 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6591 ? TREE_OPERAND (elt, 0) : 0))
6592 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6595 for (elt = TREE_PURPOSE (placeholder_expr);
6596 elt != 0 && object == 0;
6598 = ((TREE_CODE (elt) == COMPOUND_EXPR
6599 || TREE_CODE (elt) == COND_EXPR)
6600 ? TREE_OPERAND (elt, 1)
6601 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6602 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6603 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6604 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6605 ? TREE_OPERAND (elt, 0) : 0))
6606 if (POINTER_TYPE_P (TREE_TYPE (elt))
6607 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6609 object = build1 (INDIRECT_REF, need_type, elt);
6613 /* Expand this object skipping the list entries before
6614 it was found in case it is also a PLACEHOLDER_EXPR.
6615 In that case, we want to translate it using subsequent
6617 placeholder_list = TREE_CHAIN (placeholder_expr);
6618 temp = expand_expr (object, original_target, tmode,
6620 placeholder_list = old_list;
6626 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6629 case WITH_RECORD_EXPR:
6630 /* Put the object on the placeholder list, expand our first operand,
6631 and pop the list. */
6632 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6634 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6635 tmode, ro_modifier);
6636 placeholder_list = TREE_CHAIN (placeholder_list);
6640 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6641 expand_goto (TREE_OPERAND (exp, 0));
6643 expand_computed_goto (TREE_OPERAND (exp, 0));
6647 expand_exit_loop_if_false (NULL,
6648 invert_truthvalue (TREE_OPERAND (exp, 0)));
6651 case LABELED_BLOCK_EXPR:
6652 if (LABELED_BLOCK_BODY (exp))
6653 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6654 /* Should perhaps use expand_label, but this is simpler and safer. */
6655 do_pending_stack_adjust ();
6656 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6659 case EXIT_BLOCK_EXPR:
6660 if (EXIT_BLOCK_RETURN (exp))
6661 sorry ("returned value in block_exit_expr");
6662 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6667 expand_start_loop (1);
6668 expand_expr_stmt (TREE_OPERAND (exp, 0));
6676 tree vars = TREE_OPERAND (exp, 0);
6677 int vars_need_expansion = 0;
6679 /* Need to open a binding contour here because
6680 if there are any cleanups they must be contained here. */
6681 expand_start_bindings (2);
6683 /* Mark the corresponding BLOCK for output in its proper place. */
6684 if (TREE_OPERAND (exp, 2) != 0
6685 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6686 insert_block (TREE_OPERAND (exp, 2));
6688 /* If VARS have not yet been expanded, expand them now. */
6691 if (!DECL_RTL_SET_P (vars))
6693 vars_need_expansion = 1;
6696 expand_decl_init (vars);
6697 vars = TREE_CHAIN (vars);
6700 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6702 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6708 if (RTL_EXPR_SEQUENCE (exp))
6710 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6712 emit_insns (RTL_EXPR_SEQUENCE (exp));
6713 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6715 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6716 free_temps_for_rtl_expr (exp);
6717 return RTL_EXPR_RTL (exp);
6720 /* If we don't need the result, just ensure we evaluate any
6725 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6726 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6727 EXPAND_MEMORY_USE_BAD);
6731 /* All elts simple constants => refer to a constant in memory. But
6732 if this is a non-BLKmode mode, let it store a field at a time
6733 since that should make a CONST_INT or CONST_DOUBLE when we
6734 fold. Likewise, if we have a target we can use, it is best to
6735 store directly into the target unless the type is large enough
6736 that memcpy will be used. If we are making an initializer and
6737 all operands are constant, put it in memory as well. */
6738 else if ((TREE_STATIC (exp)
6739 && ((mode == BLKmode
6740 && ! (target != 0 && safe_from_p (target, exp, 1)))
6741 || TREE_ADDRESSABLE (exp)
6742 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6743 && (! MOVE_BY_PIECES_P
6744 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6746 && ! mostly_zeros_p (exp))))
6747 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6749 rtx constructor = output_constant_def (exp, 1);
6751 if (modifier != EXPAND_CONST_ADDRESS
6752 && modifier != EXPAND_INITIALIZER
6753 && modifier != EXPAND_SUM)
6754 constructor = validize_mem (constructor);
6760 /* Handle calls that pass values in multiple non-contiguous
6761 locations. The Irix 6 ABI has examples of this. */
6762 if (target == 0 || ! safe_from_p (target, exp, 1)
6763 || GET_CODE (target) == PARALLEL)
6765 = assign_temp (build_qualified_type (type,
6767 | (TREE_READONLY (exp)
6768 * TYPE_QUAL_CONST))),
6769 TREE_ADDRESSABLE (exp), 1, 1);
6771 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6772 int_size_in_bytes (TREE_TYPE (exp)));
6778 tree exp1 = TREE_OPERAND (exp, 0);
6780 tree string = string_constant (exp1, &index);
6782 /* Try to optimize reads from const strings. */
6784 && TREE_CODE (string) == STRING_CST
6785 && TREE_CODE (index) == INTEGER_CST
6786 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6787 && GET_MODE_CLASS (mode) == MODE_INT
6788 && GET_MODE_SIZE (mode) == 1
6789 && modifier != EXPAND_MEMORY_USE_WO)
6791 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6793 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6794 op0 = memory_address (mode, op0);
6796 if (cfun && current_function_check_memory_usage
6797 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6799 enum memory_use_mode memory_usage;
6800 memory_usage = get_memory_usage_from_modifier (modifier);
6802 if (memory_usage != MEMORY_USE_DONT)
6804 in_check_memory_usage = 1;
6805 emit_library_call (chkr_check_addr_libfunc,
6806 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6807 Pmode, GEN_INT (int_size_in_bytes (type)),
6808 TYPE_MODE (sizetype),
6809 GEN_INT (memory_usage),
6810 TYPE_MODE (integer_type_node));
6811 in_check_memory_usage = 0;
6815 temp = gen_rtx_MEM (mode, op0);
6816 set_mem_attributes (temp, exp, 0);
6818 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6819 here, because, in C and C++, the fact that a location is accessed
6820 through a pointer to const does not mean that the value there can
6821 never change. Languages where it can never change should
6822 also set TREE_STATIC. */
6823 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6825 /* If we are writing to this object and its type is a record with
6826 readonly fields, we must mark it as readonly so it will
6827 conflict with readonly references to those fields. */
6828 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6829 RTX_UNCHANGING_P (temp) = 1;
6835 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6839 tree array = TREE_OPERAND (exp, 0);
6840 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6841 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6842 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6845 /* Optimize the special-case of a zero lower bound.
6847 We convert the low_bound to sizetype to avoid some problems
6848 with constant folding. (E.g. suppose the lower bound is 1,
6849 and its mode is QI. Without the conversion, (ARRAY
6850 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6851 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6853 if (! integer_zerop (low_bound))
6854 index = size_diffop (index, convert (sizetype, low_bound));
6856 /* Fold an expression like: "foo"[2].
6857 This is not done in fold so it won't happen inside &.
6858 Don't fold if this is for wide characters since it's too
6859 difficult to do correctly and this is a very rare case. */
6861 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6862 && TREE_CODE (array) == STRING_CST
6863 && TREE_CODE (index) == INTEGER_CST
6864 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6865 && GET_MODE_CLASS (mode) == MODE_INT
6866 && GET_MODE_SIZE (mode) == 1)
6868 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6870 /* If this is a constant index into a constant array,
6871 just get the value from the array. Handle both the cases when
6872 we have an explicit constructor and when our operand is a variable
6873 that was declared const. */
6875 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6876 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6877 && TREE_CODE (index) == INTEGER_CST
6878 && 0 > compare_tree_int (index,
6879 list_length (CONSTRUCTOR_ELTS
6880 (TREE_OPERAND (exp, 0)))))
6884 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6885 i = TREE_INT_CST_LOW (index);
6886 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6890 return expand_expr (fold (TREE_VALUE (elem)), target,
6891 tmode, ro_modifier);
6894 else if (optimize >= 1
6895 && modifier != EXPAND_CONST_ADDRESS
6896 && modifier != EXPAND_INITIALIZER
6897 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6898 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6899 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6901 if (TREE_CODE (index) == INTEGER_CST)
6903 tree init = DECL_INITIAL (array);
6905 if (TREE_CODE (init) == CONSTRUCTOR)
6909 for (elem = CONSTRUCTOR_ELTS (init);
6911 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6912 elem = TREE_CHAIN (elem))
6915 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6916 return expand_expr (fold (TREE_VALUE (elem)), target,
6917 tmode, ro_modifier);
6919 else if (TREE_CODE (init) == STRING_CST
6920 && 0 > compare_tree_int (index,
6921 TREE_STRING_LENGTH (init)))
6923 tree type = TREE_TYPE (TREE_TYPE (init));
6924 enum machine_mode mode = TYPE_MODE (type);
6926 if (GET_MODE_CLASS (mode) == MODE_INT
6927 && GET_MODE_SIZE (mode) == 1)
6929 (TREE_STRING_POINTER
6930 (init)[TREE_INT_CST_LOW (index)]));
6939 case ARRAY_RANGE_REF:
6940 /* If the operand is a CONSTRUCTOR, we can just extract the
6941 appropriate field if it is present. Don't do this if we have
6942 already written the data since we want to refer to that copy
6943 and varasm.c assumes that's what we'll do. */
6944 if (code == COMPONENT_REF
6945 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6946 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6950 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6951 elt = TREE_CHAIN (elt))
6952 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6953 /* We can normally use the value of the field in the
6954 CONSTRUCTOR. However, if this is a bitfield in
6955 an integral mode that we can fit in a HOST_WIDE_INT,
6956 we must mask only the number of bits in the bitfield,
6957 since this is done implicitly by the constructor. If
6958 the bitfield does not meet either of those conditions,
6959 we can't do this optimization. */
6960 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6961 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6963 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6964 <= HOST_BITS_PER_WIDE_INT))))
6966 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6967 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6969 HOST_WIDE_INT bitsize
6970 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6972 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6974 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6975 op0 = expand_and (op0, op1, target);
6979 enum machine_mode imode
6980 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6982 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6985 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6987 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6997 enum machine_mode mode1;
6998 HOST_WIDE_INT bitsize, bitpos;
7001 unsigned int alignment;
7002 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7003 &mode1, &unsignedp, &volatilep,
7006 /* If we got back the original object, something is wrong. Perhaps
7007 we are evaluating an expression too early. In any event, don't
7008 infinitely recurse. */
7012 /* If TEM's type is a union of variable size, pass TARGET to the inner
7013 computation, since it will need a temporary and TARGET is known
7014 to have to do. This occurs in unchecked conversion in Ada. */
7016 op0 = expand_expr (tem,
7017 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7018 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7020 ? target : NULL_RTX),
7022 (modifier == EXPAND_INITIALIZER
7023 || modifier == EXPAND_CONST_ADDRESS)
7024 ? modifier : EXPAND_NORMAL);
7026 /* If this is a constant, put it into a register if it is a
7027 legitimate constant and OFFSET is 0 and memory if it isn't. */
7028 if (CONSTANT_P (op0))
7030 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7031 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7033 op0 = force_reg (mode, op0);
7035 op0 = validize_mem (force_const_mem (mode, op0));
7040 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7042 /* If this object is in a register, put it into memory.
7043 This case can't occur in C, but can in Ada if we have
7044 unchecked conversion of an expression from a scalar type to
7045 an array or record type. */
7046 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7047 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7049 /* If the operand is a SAVE_EXPR, we can deal with this by
7050 forcing the SAVE_EXPR into memory. */
7051 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7053 put_var_into_stack (TREE_OPERAND (exp, 0));
7054 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7059 = build_qualified_type (TREE_TYPE (tem),
7060 (TYPE_QUALS (TREE_TYPE (tem))
7061 | TYPE_QUAL_CONST));
7062 rtx memloc = assign_temp (nt, 1, 1, 1);
7064 mark_temp_addr_taken (memloc);
7065 emit_move_insn (memloc, op0);
7070 if (GET_CODE (op0) != MEM)
7073 if (GET_MODE (offset_rtx) != ptr_mode)
7075 #ifdef POINTERS_EXTEND_UNSIGNED
7076 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7078 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7082 /* A constant address in OP0 can have VOIDmode, we must not try
7083 to call force_reg for that case. Avoid that case. */
7084 if (GET_CODE (op0) == MEM
7085 && GET_MODE (op0) == BLKmode
7086 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7088 && (bitpos % bitsize) == 0
7089 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7090 && alignment == GET_MODE_ALIGNMENT (mode1))
7092 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7094 if (GET_CODE (XEXP (temp, 0)) == REG)
7097 op0 = (replace_equiv_address
7099 force_reg (GET_MODE (XEXP (temp, 0)),
7104 op0 = change_address (op0, VOIDmode,
7105 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7106 force_reg (ptr_mode,
7110 /* Don't forget about volatility even if this is a bitfield. */
7111 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7113 op0 = copy_rtx (op0);
7114 MEM_VOLATILE_P (op0) = 1;
7117 /* Check the access. */
7118 if (cfun != 0 && current_function_check_memory_usage
7119 && GET_CODE (op0) == MEM)
7121 enum memory_use_mode memory_usage;
7122 memory_usage = get_memory_usage_from_modifier (modifier);
7124 if (memory_usage != MEMORY_USE_DONT)
7129 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7130 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7132 /* Check the access right of the pointer. */
7133 in_check_memory_usage = 1;
7134 if (size > BITS_PER_UNIT)
7135 emit_library_call (chkr_check_addr_libfunc,
7136 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7137 Pmode, GEN_INT (size / BITS_PER_UNIT),
7138 TYPE_MODE (sizetype),
7139 GEN_INT (memory_usage),
7140 TYPE_MODE (integer_type_node));
7141 in_check_memory_usage = 0;
7145 /* In cases where an aligned union has an unaligned object
7146 as a field, we might be extracting a BLKmode value from
7147 an integer-mode (e.g., SImode) object. Handle this case
7148 by doing the extract into an object as wide as the field
7149 (which we know to be the width of a basic mode), then
7150 storing into memory, and changing the mode to BLKmode. */
7151 if (mode1 == VOIDmode
7152 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7153 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7154 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7155 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7156 && modifier != EXPAND_CONST_ADDRESS
7157 && modifier != EXPAND_INITIALIZER)
7158 /* If the field isn't aligned enough to fetch as a memref,
7159 fetch it as a bit field. */
7160 || (mode1 != BLKmode
7161 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7162 && ((TYPE_ALIGN (TREE_TYPE (tem))
7163 < GET_MODE_ALIGNMENT (mode))
7164 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7165 /* If the type and the field are a constant size and the
7166 size of the type isn't the same size as the bitfield,
7167 we must use bitfield operations. */
7169 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7171 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7174 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7175 && (TYPE_ALIGN (type) > alignment
7176 || bitpos % TYPE_ALIGN (type) != 0)))
7178 enum machine_mode ext_mode = mode;
7180 if (ext_mode == BLKmode
7181 && ! (target != 0 && GET_CODE (op0) == MEM
7182 && GET_CODE (target) == MEM
7183 && bitpos % BITS_PER_UNIT == 0))
7184 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7186 if (ext_mode == BLKmode)
7188 /* In this case, BITPOS must start at a byte boundary and
7189 TARGET, if specified, must be a MEM. */
7190 if (GET_CODE (op0) != MEM
7191 || (target != 0 && GET_CODE (target) != MEM)
7192 || bitpos % BITS_PER_UNIT != 0)
7195 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7197 target = assign_temp (type, 0, 1, 1);
7199 emit_block_move (target, op0,
7200 bitsize == -1 ? expr_size (exp)
7201 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7208 op0 = validize_mem (op0);
7210 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7211 mark_reg_pointer (XEXP (op0, 0), alignment);
7213 op0 = extract_bit_field (op0, bitsize, bitpos,
7214 unsignedp, target, ext_mode, ext_mode,
7216 int_size_in_bytes (TREE_TYPE (tem)));
7218 /* If the result is a record type and BITSIZE is narrower than
7219 the mode of OP0, an integral mode, and this is a big endian
7220 machine, we must put the field into the high-order bits. */
7221 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7222 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7223 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7224 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7225 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7229 if (mode == BLKmode)
7231 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7233 rtx new = assign_temp (nt, 0, 1, 1);
7235 emit_move_insn (new, op0);
7236 op0 = copy_rtx (new);
7237 PUT_MODE (op0, BLKmode);
7243 /* If the result is BLKmode, use that to access the object
7245 if (mode == BLKmode)
7248 /* Get a reference to just this component. */
7249 if (modifier == EXPAND_CONST_ADDRESS
7250 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7251 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7253 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7255 set_mem_attributes (op0, exp, 0);
7256 if (GET_CODE (XEXP (op0, 0)) == REG)
7257 mark_reg_pointer (XEXP (op0, 0), alignment);
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267 convert_move (target, op0, unsignedp);
7271 /* Intended for a reference to a buffer of a file-object in Pascal.
7272 But it's not certain that a special tree code will really be
7273 necessary for these. INDIRECT_REF might work for them. */
7279 /* Pascal set IN expression.
7282 rlo = set_low - (set_low%bits_per_word);
7283 the_word = set [ (index - rlo)/bits_per_word ];
7284 bit_index = index % bits_per_word;
7285 bitmask = 1 << bit_index;
7286 return !!(the_word & bitmask); */
7288 tree set = TREE_OPERAND (exp, 0);
7289 tree index = TREE_OPERAND (exp, 1);
7290 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7291 tree set_type = TREE_TYPE (set);
7292 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7293 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7294 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7295 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7296 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7297 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7298 rtx setaddr = XEXP (setval, 0);
7299 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7301 rtx diff, quo, rem, addr, bit, result;
7303 /* If domain is empty, answer is no. Likewise if index is constant
7304 and out of bounds. */
7305 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7306 && TREE_CODE (set_low_bound) == INTEGER_CST
7307 && tree_int_cst_lt (set_high_bound, set_low_bound))
7308 || (TREE_CODE (index) == INTEGER_CST
7309 && TREE_CODE (set_low_bound) == INTEGER_CST
7310 && tree_int_cst_lt (index, set_low_bound))
7311 || (TREE_CODE (set_high_bound) == INTEGER_CST
7312 && TREE_CODE (index) == INTEGER_CST
7313 && tree_int_cst_lt (set_high_bound, index))))
7317 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7319 /* If we get here, we have to generate the code for both cases
7320 (in range and out of range). */
7322 op0 = gen_label_rtx ();
7323 op1 = gen_label_rtx ();
7325 if (! (GET_CODE (index_val) == CONST_INT
7326 && GET_CODE (lo_r) == CONST_INT))
7328 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7329 GET_MODE (index_val), iunsignedp, 0, op1);
7332 if (! (GET_CODE (index_val) == CONST_INT
7333 && GET_CODE (hi_r) == CONST_INT))
7335 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7336 GET_MODE (index_val), iunsignedp, 0, op1);
7339 /* Calculate the element number of bit zero in the first word
7341 if (GET_CODE (lo_r) == CONST_INT)
7342 rlow = GEN_INT (INTVAL (lo_r)
7343 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7345 rlow = expand_binop (index_mode, and_optab, lo_r,
7346 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7349 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7350 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7352 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7353 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7354 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7355 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7357 addr = memory_address (byte_mode,
7358 expand_binop (index_mode, add_optab, diff,
7359 setaddr, NULL_RTX, iunsignedp,
7362 /* Extract the bit we want to examine. */
7363 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7364 gen_rtx_MEM (byte_mode, addr),
7365 make_tree (TREE_TYPE (index), rem),
7367 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7368 GET_MODE (target) == byte_mode ? target : 0,
7369 1, OPTAB_LIB_WIDEN);
7371 if (result != target)
7372 convert_move (target, result, 1);
7374 /* Output the code to handle the out-of-range case. */
7377 emit_move_insn (target, const0_rtx);
7382 case WITH_CLEANUP_EXPR:
7383 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7385 WITH_CLEANUP_EXPR_RTL (exp)
7386 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7387 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7389 /* That's it for this cleanup. */
7390 TREE_OPERAND (exp, 1) = 0;
7392 return WITH_CLEANUP_EXPR_RTL (exp);
7394 case CLEANUP_POINT_EXPR:
7396 /* Start a new binding layer that will keep track of all cleanup
7397 actions to be performed. */
7398 expand_start_bindings (2);
7400 target_temp_slot_level = temp_slot_level;
7402 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7403 /* If we're going to use this value, load it up now. */
7405 op0 = force_not_mem (op0);
7406 preserve_temp_slots (op0);
7407 expand_end_bindings (NULL_TREE, 0, 0);
7412 /* Check for a built-in function. */
7413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7414 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7416 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7418 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7419 == BUILT_IN_FRONTEND)
7420 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7422 return expand_builtin (exp, target, subtarget, tmode, ignore);
7425 return expand_call (exp, target, ignore);
7427 case NON_LVALUE_EXPR:
7430 case REFERENCE_EXPR:
7431 if (TREE_OPERAND (exp, 0) == error_mark_node)
7434 if (TREE_CODE (type) == UNION_TYPE)
7436 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7438 /* If both input and output are BLKmode, this conversion
7439 isn't actually doing anything unless we need to make the
7440 alignment stricter. */
7441 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7442 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7443 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7444 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7448 target = assign_temp (type, 0, 1, 1);
7450 if (GET_CODE (target) == MEM)
7451 /* Store data into beginning of memory target. */
7452 store_expr (TREE_OPERAND (exp, 0),
7453 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7455 else if (GET_CODE (target) == REG)
7456 /* Store this field into a union of the proper type. */
7457 store_field (target,
7458 MIN ((int_size_in_bytes (TREE_TYPE
7459 (TREE_OPERAND (exp, 0)))
7461 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7462 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7463 VOIDmode, 0, BITS_PER_UNIT,
7464 int_size_in_bytes (type), 0);
7468 /* Return the entire union. */
7472 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7477 /* If the signedness of the conversion differs and OP0 is
7478 a promoted SUBREG, clear that indication since we now
7479 have to do the proper extension. */
7480 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7481 && GET_CODE (op0) == SUBREG)
7482 SUBREG_PROMOTED_VAR_P (op0) = 0;
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7488 if (GET_MODE (op0) == mode)
7491 /* If OP0 is a constant, just convert it into the proper mode. */
7492 if (CONSTANT_P (op0))
7494 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7495 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7497 if (modifier == EXPAND_INITIALIZER)
7498 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7502 convert_to_mode (mode, op0,
7503 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7505 convert_move (target, op0,
7506 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7510 /* We come here from MINUS_EXPR when the second operand is a
7513 this_optab = ! unsignedp && flag_trapv
7514 && (GET_MODE_CLASS(mode) == MODE_INT)
7515 ? addv_optab : add_optab;
7517 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7518 something else, make sure we add the register to the constant and
7519 then to the other thing. This case can occur during strength
7520 reduction and doing it this way will produce better code if the
7521 frame pointer or argument pointer is eliminated.
7523 fold-const.c will ensure that the constant is always in the inner
7524 PLUS_EXPR, so the only case we need to do anything about is if
7525 sp, ap, or fp is our second argument, in which case we must swap
7526 the innermost first argument and our second argument. */
7528 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7529 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7530 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7531 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7532 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7533 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7535 tree t = TREE_OPERAND (exp, 1);
7537 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7538 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7541 /* If the result is to be ptr_mode and we are adding an integer to
7542 something, we might be forming a constant. So try to use
7543 plus_constant. If it produces a sum and we can't accept it,
7544 use force_operand. This allows P = &ARR[const] to generate
7545 efficient code on machines where a SYMBOL_REF is not a valid
7548 If this is an EXPAND_SUM call, always return the sum. */
7549 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7550 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7552 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7553 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7554 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7560 /* Use immed_double_const to ensure that the constant is
7561 truncated according to the mode of OP1, then sign extended
7562 to a HOST_WIDE_INT. Using the constant directly can result
7563 in non-canonical RTL in a 64x32 cross compile. */
7565 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7567 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7568 op1 = plus_constant (op1, INTVAL (constant_part));
7569 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7570 op1 = force_operand (op1, target);
7574 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7575 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7576 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7582 if (! CONSTANT_P (op0))
7584 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7585 VOIDmode, modifier);
7586 /* Don't go to both_summands if modifier
7587 says it's not right to return a PLUS. */
7588 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7592 /* Use immed_double_const to ensure that the constant is
7593 truncated according to the mode of OP1, then sign extended
7594 to a HOST_WIDE_INT. Using the constant directly can result
7595 in non-canonical RTL in a 64x32 cross compile. */
7597 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7599 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7600 op0 = plus_constant (op0, INTVAL (constant_part));
7601 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7602 op0 = force_operand (op0, target);
7607 /* No sense saving up arithmetic to be done
7608 if it's all in the wrong mode to form part of an address.
7609 And force_operand won't know whether to sign-extend or
7611 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7612 || mode != ptr_mode)
7615 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7618 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7619 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7622 /* Make sure any term that's a sum with a constant comes last. */
7623 if (GET_CODE (op0) == PLUS
7624 && CONSTANT_P (XEXP (op0, 1)))
7630 /* If adding to a sum including a constant,
7631 associate it to put the constant outside. */
7632 if (GET_CODE (op1) == PLUS
7633 && CONSTANT_P (XEXP (op1, 1)))
7635 rtx constant_term = const0_rtx;
7637 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7640 /* Ensure that MULT comes first if there is one. */
7641 else if (GET_CODE (op0) == MULT)
7642 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7644 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7646 /* Let's also eliminate constants from op0 if possible. */
7647 op0 = eliminate_constant_term (op0, &constant_term);
7649 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7650 their sum should be a constant. Form it into OP1, since the
7651 result we want will then be OP0 + OP1. */
7653 temp = simplify_binary_operation (PLUS, mode, constant_term,
7658 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7661 /* Put a constant term last and put a multiplication first. */
7662 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7663 temp = op1, op1 = op0, op0 = temp;
7665 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7666 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7669 /* For initializers, we are allowed to return a MINUS of two
7670 symbolic constants. Here we handle all cases when both operands
7672 /* Handle difference of two symbolic constants,
7673 for the sake of an initializer. */
7674 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7675 && really_constant_p (TREE_OPERAND (exp, 0))
7676 && really_constant_p (TREE_OPERAND (exp, 1)))
7678 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7679 VOIDmode, ro_modifier);
7680 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7681 VOIDmode, ro_modifier);
7683 /* If the last operand is a CONST_INT, use plus_constant of
7684 the negated constant. Else make the MINUS. */
7685 if (GET_CODE (op1) == CONST_INT)
7686 return plus_constant (op0, - INTVAL (op1));
7688 return gen_rtx_MINUS (mode, op0, op1);
7690 /* Convert A - const to A + (-const). */
7691 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7693 tree negated = fold (build1 (NEGATE_EXPR, type,
7694 TREE_OPERAND (exp, 1)));
7696 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7697 /* If we can't negate the constant in TYPE, leave it alone and
7698 expand_binop will negate it for us. We used to try to do it
7699 here in the signed version of TYPE, but that doesn't work
7700 on POINTER_TYPEs. */;
7703 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7707 this_optab = ! unsignedp && flag_trapv
7708 && (GET_MODE_CLASS(mode) == MODE_INT)
7709 ? subv_optab : sub_optab;
7713 /* If first operand is constant, swap them.
7714 Thus the following special case checks need only
7715 check the second operand. */
7716 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7718 register tree t1 = TREE_OPERAND (exp, 0);
7719 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7720 TREE_OPERAND (exp, 1) = t1;
7723 /* Attempt to return something suitable for generating an
7724 indexed address, for machines that support that. */
7726 if (modifier == EXPAND_SUM && mode == ptr_mode
7727 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7728 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7733 /* Apply distributive law if OP0 is x+c. */
7734 if (GET_CODE (op0) == PLUS
7735 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7740 (mode, XEXP (op0, 0),
7741 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7742 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7743 * INTVAL (XEXP (op0, 1))));
7745 if (GET_CODE (op0) != REG)
7746 op0 = force_operand (op0, NULL_RTX);
7747 if (GET_CODE (op0) != REG)
7748 op0 = copy_to_mode_reg (mode, op0);
7751 gen_rtx_MULT (mode, op0,
7752 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7755 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7758 /* Check for multiplying things that have been extended
7759 from a narrower type. If this machine supports multiplying
7760 in that narrower type with a result in the desired type,
7761 do it that way, and avoid the explicit type-conversion. */
7762 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7763 && TREE_CODE (type) == INTEGER_TYPE
7764 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7765 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7766 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7767 && int_fits_type_p (TREE_OPERAND (exp, 1),
7768 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7769 /* Don't use a widening multiply if a shift will do. */
7770 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7771 > HOST_BITS_PER_WIDE_INT)
7772 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7774 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7775 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7778 /* If both operands are extended, they must either both
7779 be zero-extended or both be sign-extended. */
7780 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7782 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7784 enum machine_mode innermode
7785 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7786 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7787 ? smul_widen_optab : umul_widen_optab);
7788 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7789 ? umul_widen_optab : smul_widen_optab);
7790 if (mode == GET_MODE_WIDER_MODE (innermode))
7792 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7794 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7795 NULL_RTX, VOIDmode, 0);
7796 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7800 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7801 NULL_RTX, VOIDmode, 0);
7804 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7805 && innermode == word_mode)
7808 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7809 NULL_RTX, VOIDmode, 0);
7810 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7811 op1 = convert_modes (innermode, mode,
7812 expand_expr (TREE_OPERAND (exp, 1),
7813 NULL_RTX, VOIDmode, 0),
7816 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7817 NULL_RTX, VOIDmode, 0);
7818 temp = expand_binop (mode, other_optab, op0, op1, target,
7819 unsignedp, OPTAB_LIB_WIDEN);
7820 htem = expand_mult_highpart_adjust (innermode,
7821 gen_highpart (innermode, temp),
7823 gen_highpart (innermode, temp),
7825 emit_move_insn (gen_highpart (innermode, temp), htem);
7830 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7831 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7832 return expand_mult (mode, op0, op1, target, unsignedp);
7834 case TRUNC_DIV_EXPR:
7835 case FLOOR_DIV_EXPR:
7837 case ROUND_DIV_EXPR:
7838 case EXACT_DIV_EXPR:
7839 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7841 /* Possible optimization: compute the dividend with EXPAND_SUM
7842 then if the divisor is constant can optimize the case
7843 where some terms of the dividend have coeffs divisible by it. */
7844 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7845 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7846 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7849 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7850 expensive divide. If not, combine will rebuild the original
7852 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7853 && !real_onep (TREE_OPERAND (exp, 0)))
7854 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7855 build (RDIV_EXPR, type,
7856 build_real (type, dconst1),
7857 TREE_OPERAND (exp, 1))),
7858 target, tmode, unsignedp);
7859 this_optab = sdiv_optab;
7862 case TRUNC_MOD_EXPR:
7863 case FLOOR_MOD_EXPR:
7865 case ROUND_MOD_EXPR:
7866 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7868 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7869 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7870 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7872 case FIX_ROUND_EXPR:
7873 case FIX_FLOOR_EXPR:
7875 abort (); /* Not used for C. */
7877 case FIX_TRUNC_EXPR:
7878 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7880 target = gen_reg_rtx (mode);
7881 expand_fix (target, op0, unsignedp);
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7887 target = gen_reg_rtx (mode);
7888 /* expand_float can't figure out what to do if FROM has VOIDmode.
7889 So give it the correct mode. With -O, cse will optimize this. */
7890 if (GET_MODE (op0) == VOIDmode)
7891 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7893 expand_float (target, op0,
7894 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7898 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7899 temp = expand_unop (mode,
7900 ! unsignedp && flag_trapv
7901 && (GET_MODE_CLASS(mode) == MODE_INT)
7902 ? negv_optab : neg_optab, op0, target, 0);
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7910 /* Handle complex values specially. */
7911 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7912 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7913 return expand_complex_abs (mode, op0, target, unsignedp);
7915 /* Unsigned abs is simply the operand. Testing here means we don't
7916 risk generating incorrect code below. */
7917 if (TREE_UNSIGNED (type))
7920 return expand_abs (mode, op0, target, unsignedp,
7921 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7925 target = original_target;
7926 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7927 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7928 || GET_MODE (target) != mode
7929 || (GET_CODE (target) == REG
7930 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7931 target = gen_reg_rtx (mode);
7932 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7933 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7935 /* First try to do it with a special MIN or MAX instruction.
7936 If that does not win, use a conditional jump to select the proper
7938 this_optab = (TREE_UNSIGNED (type)
7939 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7940 : (code == MIN_EXPR ? smin_optab : smax_optab));
7942 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7947 /* At this point, a MEM target is no longer useful; we will get better
7950 if (GET_CODE (target) == MEM)
7951 target = gen_reg_rtx (mode);
7954 emit_move_insn (target, op0);
7956 op0 = gen_label_rtx ();
7958 /* If this mode is an integer too wide to compare properly,
7959 compare word by word. Rely on cse to optimize constant cases. */
7960 if (GET_MODE_CLASS (mode) == MODE_INT
7961 && ! can_compare_p (GE, mode, ccp_jump))
7963 if (code == MAX_EXPR)
7964 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7965 target, op1, NULL_RTX, op0);
7967 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7968 op1, target, NULL_RTX, op0);
7972 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7973 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7974 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7977 emit_move_insn (target, op1);
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7983 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7990 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7995 /* ??? Can optimize bitwise operations with one arg constant.
7996 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7997 and (a bitwise1 b) bitwise2 b (etc)
7998 but that is probably not worth while. */
8000 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8001 boolean values when we want in all cases to compute both of them. In
8002 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8003 as actual zero-or-1 values and then bitwise anding. In cases where
8004 there cannot be any side effects, better code would be made by
8005 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8006 how to recognize those cases. */
8008 case TRUTH_AND_EXPR:
8010 this_optab = and_optab;
8015 this_optab = ior_optab;
8018 case TRUTH_XOR_EXPR:
8020 this_optab = xor_optab;
8027 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8029 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8030 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8033 /* Could determine the answer when only additive constants differ. Also,
8034 the addition of one can be handled by changing the condition. */
8041 case UNORDERED_EXPR:
8048 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8052 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8053 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8055 && GET_CODE (original_target) == REG
8056 && (GET_MODE (original_target)
8057 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8059 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8062 if (temp != original_target)
8063 temp = copy_to_reg (temp);
8065 op1 = gen_label_rtx ();
8066 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8067 GET_MODE (temp), unsignedp, 0, op1);
8068 emit_move_insn (temp, const1_rtx);
8073 /* If no set-flag instruction, must generate a conditional
8074 store into a temporary variable. Drop through
8075 and handle this like && and ||. */
8077 case TRUTH_ANDIF_EXPR:
8078 case TRUTH_ORIF_EXPR:
8080 && (target == 0 || ! safe_from_p (target, exp, 1)
8081 /* Make sure we don't have a hard reg (such as function's return
8082 value) live across basic blocks, if not optimizing. */
8083 || (!optimize && GET_CODE (target) == REG
8084 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8085 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8088 emit_clr_insn (target);
8090 op1 = gen_label_rtx ();
8091 jumpifnot (exp, op1);
8094 emit_0_to_1_insn (target);
8097 return ignore ? const0_rtx : target;
8099 case TRUTH_NOT_EXPR:
8100 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8101 /* The parser is careful to generate TRUTH_NOT_EXPR
8102 only with operands that are always zero or one. */
8103 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8104 target, 1, OPTAB_LIB_WIDEN);
8110 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8112 return expand_expr (TREE_OPERAND (exp, 1),
8113 (ignore ? const0_rtx : target),
8117 /* If we would have a "singleton" (see below) were it not for a
8118 conversion in each arm, bring that conversion back out. */
8119 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8120 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8121 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8122 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8124 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8125 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8127 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8128 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8129 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8130 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8131 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8132 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8133 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8134 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8135 return expand_expr (build1 (NOP_EXPR, type,
8136 build (COND_EXPR, TREE_TYPE (iftrue),
8137 TREE_OPERAND (exp, 0),
8139 target, tmode, modifier);
8143 /* Note that COND_EXPRs whose type is a structure or union
8144 are required to be constructed to contain assignments of
8145 a temporary variable, so that we can evaluate them here
8146 for side effect only. If type is void, we must do likewise. */
8148 /* If an arm of the branch requires a cleanup,
8149 only that cleanup is performed. */
8152 tree binary_op = 0, unary_op = 0;
8154 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8155 convert it to our mode, if necessary. */
8156 if (integer_onep (TREE_OPERAND (exp, 1))
8157 && integer_zerop (TREE_OPERAND (exp, 2))
8158 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8162 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8167 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8168 if (GET_MODE (op0) == mode)
8172 target = gen_reg_rtx (mode);
8173 convert_move (target, op0, unsignedp);
8177 /* Check for X ? A + B : A. If we have this, we can copy A to the
8178 output and conditionally add B. Similarly for unary operations.
8179 Don't do this if X has side-effects because those side effects
8180 might affect A or B and the "?" operation is a sequence point in
8181 ANSI. (operand_equal_p tests for side effects.) */
8183 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8184 && operand_equal_p (TREE_OPERAND (exp, 2),
8185 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8186 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8187 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8188 && operand_equal_p (TREE_OPERAND (exp, 1),
8189 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8190 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8191 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8192 && operand_equal_p (TREE_OPERAND (exp, 2),
8193 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8194 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8195 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8196 && operand_equal_p (TREE_OPERAND (exp, 1),
8197 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8198 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8200 /* If we are not to produce a result, we have no target. Otherwise,
8201 if a target was specified use it; it will not be used as an
8202 intermediate target unless it is safe. If no target, use a
8207 else if (original_target
8208 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8209 || (singleton && GET_CODE (original_target) == REG
8210 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8211 && original_target == var_rtx (singleton)))
8212 && GET_MODE (original_target) == mode
8213 #ifdef HAVE_conditional_move
8214 && (! can_conditionally_move_p (mode)
8215 || GET_CODE (original_target) == REG
8216 || TREE_ADDRESSABLE (type))
8218 && (GET_CODE (original_target) != MEM
8219 || TREE_ADDRESSABLE (type)))
8220 temp = original_target;
8221 else if (TREE_ADDRESSABLE (type))
8224 temp = assign_temp (type, 0, 0, 1);
8226 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8227 do the test of X as a store-flag operation, do this as
8228 A + ((X != 0) << log C). Similarly for other simple binary
8229 operators. Only do for C == 1 if BRANCH_COST is low. */
8230 if (temp && singleton && binary_op
8231 && (TREE_CODE (binary_op) == PLUS_EXPR
8232 || TREE_CODE (binary_op) == MINUS_EXPR
8233 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8234 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8235 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8236 : integer_onep (TREE_OPERAND (binary_op, 1)))
8237 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8240 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8241 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8242 ? addv_optab : add_optab)
8243 : TREE_CODE (binary_op) == MINUS_EXPR
8244 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8245 ? subv_optab : sub_optab)
8246 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8249 /* If we had X ? A : A + 1, do this as A + (X == 0).
8251 We have to invert the truth value here and then put it
8252 back later if do_store_flag fails. We cannot simply copy
8253 TREE_OPERAND (exp, 0) to another variable and modify that
8254 because invert_truthvalue can modify the tree pointed to
8256 if (singleton == TREE_OPERAND (exp, 1))
8257 TREE_OPERAND (exp, 0)
8258 = invert_truthvalue (TREE_OPERAND (exp, 0));
8260 result = do_store_flag (TREE_OPERAND (exp, 0),
8261 (safe_from_p (temp, singleton, 1)
8263 mode, BRANCH_COST <= 1);
8265 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8266 result = expand_shift (LSHIFT_EXPR, mode, result,
8267 build_int_2 (tree_log2
8271 (safe_from_p (temp, singleton, 1)
8272 ? temp : NULL_RTX), 0);
8276 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8277 return expand_binop (mode, boptab, op1, result, temp,
8278 unsignedp, OPTAB_LIB_WIDEN);
8280 else if (singleton == TREE_OPERAND (exp, 1))
8281 TREE_OPERAND (exp, 0)
8282 = invert_truthvalue (TREE_OPERAND (exp, 0));
8285 do_pending_stack_adjust ();
8287 op0 = gen_label_rtx ();
8289 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8293 /* If the target conflicts with the other operand of the
8294 binary op, we can't use it. Also, we can't use the target
8295 if it is a hard register, because evaluating the condition
8296 might clobber it. */
8298 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8299 || (GET_CODE (temp) == REG
8300 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8301 temp = gen_reg_rtx (mode);
8302 store_expr (singleton, temp, 0);
8305 expand_expr (singleton,
8306 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8307 if (singleton == TREE_OPERAND (exp, 1))
8308 jumpif (TREE_OPERAND (exp, 0), op0);
8310 jumpifnot (TREE_OPERAND (exp, 0), op0);
8312 start_cleanup_deferral ();
8313 if (binary_op && temp == 0)
8314 /* Just touch the other operand. */
8315 expand_expr (TREE_OPERAND (binary_op, 1),
8316 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8318 store_expr (build (TREE_CODE (binary_op), type,
8319 make_tree (type, temp),
8320 TREE_OPERAND (binary_op, 1)),
8323 store_expr (build1 (TREE_CODE (unary_op), type,
8324 make_tree (type, temp)),
8328 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8329 comparison operator. If we have one of these cases, set the
8330 output to A, branch on A (cse will merge these two references),
8331 then set the output to FOO. */
8333 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8334 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8335 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8336 TREE_OPERAND (exp, 1), 0)
8337 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8338 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8339 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8341 if (GET_CODE (temp) == REG
8342 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8343 temp = gen_reg_rtx (mode);
8344 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8345 jumpif (TREE_OPERAND (exp, 0), op0);
8347 start_cleanup_deferral ();
8348 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8352 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8353 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8354 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8355 TREE_OPERAND (exp, 2), 0)
8356 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8357 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8358 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8360 if (GET_CODE (temp) == REG
8361 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8362 temp = gen_reg_rtx (mode);
8363 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8364 jumpifnot (TREE_OPERAND (exp, 0), op0);
8366 start_cleanup_deferral ();
8367 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8372 op1 = gen_label_rtx ();
8373 jumpifnot (TREE_OPERAND (exp, 0), op0);
8375 start_cleanup_deferral ();
8377 /* One branch of the cond can be void, if it never returns. For
8378 example A ? throw : E */
8380 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8381 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8383 expand_expr (TREE_OPERAND (exp, 1),
8384 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8385 end_cleanup_deferral ();
8387 emit_jump_insn (gen_jump (op1));
8390 start_cleanup_deferral ();
8392 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8393 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8395 expand_expr (TREE_OPERAND (exp, 2),
8396 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8399 end_cleanup_deferral ();
8410 /* Something needs to be initialized, but we didn't know
8411 where that thing was when building the tree. For example,
8412 it could be the return value of a function, or a parameter
8413 to a function which lays down in the stack, or a temporary
8414 variable which must be passed by reference.
8416 We guarantee that the expression will either be constructed
8417 or copied into our original target. */
8419 tree slot = TREE_OPERAND (exp, 0);
8420 tree cleanups = NULL_TREE;
8423 if (TREE_CODE (slot) != VAR_DECL)
8427 target = original_target;
8429 /* Set this here so that if we get a target that refers to a
8430 register variable that's already been used, put_reg_into_stack
8431 knows that it should fix up those uses. */
8432 TREE_USED (slot) = 1;
8436 if (DECL_RTL_SET_P (slot))
8438 target = DECL_RTL (slot);
8439 /* If we have already expanded the slot, so don't do
8441 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8446 target = assign_temp (type, 2, 0, 1);
8447 /* All temp slots at this level must not conflict. */
8448 preserve_temp_slots (target);
8449 SET_DECL_RTL (slot, target);
8450 if (TREE_ADDRESSABLE (slot))
8451 put_var_into_stack (slot);
8453 /* Since SLOT is not known to the called function
8454 to belong to its stack frame, we must build an explicit
8455 cleanup. This case occurs when we must build up a reference
8456 to pass the reference as an argument. In this case,
8457 it is very likely that such a reference need not be
8460 if (TREE_OPERAND (exp, 2) == 0)
8461 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8462 cleanups = TREE_OPERAND (exp, 2);
8467 /* This case does occur, when expanding a parameter which
8468 needs to be constructed on the stack. The target
8469 is the actual stack address that we want to initialize.
8470 The function we call will perform the cleanup in this case. */
8472 /* If we have already assigned it space, use that space,
8473 not target that we were passed in, as our target
8474 parameter is only a hint. */
8475 if (DECL_RTL_SET_P (slot))
8477 target = DECL_RTL (slot);
8478 /* If we have already expanded the slot, so don't do
8480 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8485 SET_DECL_RTL (slot, target);
8486 /* If we must have an addressable slot, then make sure that
8487 the RTL that we just stored in slot is OK. */
8488 if (TREE_ADDRESSABLE (slot))
8489 put_var_into_stack (slot);
8493 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8494 /* Mark it as expanded. */
8495 TREE_OPERAND (exp, 1) = NULL_TREE;
8497 store_expr (exp1, target, 0);
8499 expand_decl_cleanup (NULL_TREE, cleanups);
8506 tree lhs = TREE_OPERAND (exp, 0);
8507 tree rhs = TREE_OPERAND (exp, 1);
8508 tree noncopied_parts = 0;
8509 tree lhs_type = TREE_TYPE (lhs);
8511 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8512 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8514 = init_noncopied_parts (stabilize_reference (lhs),
8515 TYPE_NONCOPIED_PARTS (lhs_type));
8517 while (noncopied_parts != 0)
8519 expand_assignment (TREE_VALUE (noncopied_parts),
8520 TREE_PURPOSE (noncopied_parts), 0, 0);
8521 noncopied_parts = TREE_CHAIN (noncopied_parts);
8528 /* If lhs is complex, expand calls in rhs before computing it.
8529 That's so we don't compute a pointer and save it over a call.
8530 If lhs is simple, compute it first so we can give it as a
8531 target if the rhs is just a call. This avoids an extra temp and copy
8532 and that prevents a partial-subsumption which makes bad code.
8533 Actually we could treat component_ref's of vars like vars. */
8535 tree lhs = TREE_OPERAND (exp, 0);
8536 tree rhs = TREE_OPERAND (exp, 1);
8537 tree noncopied_parts = 0;
8538 tree lhs_type = TREE_TYPE (lhs);
8542 /* Check for |= or &= of a bitfield of size one into another bitfield
8543 of size 1. In this case, (unless we need the result of the
8544 assignment) we can do this more efficiently with a
8545 test followed by an assignment, if necessary.
8547 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8548 things change so we do, this code should be enhanced to
8551 && TREE_CODE (lhs) == COMPONENT_REF
8552 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8553 || TREE_CODE (rhs) == BIT_AND_EXPR)
8554 && TREE_OPERAND (rhs, 0) == lhs
8555 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8556 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8557 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8559 rtx label = gen_label_rtx ();
8561 do_jump (TREE_OPERAND (rhs, 1),
8562 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8563 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8564 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8565 (TREE_CODE (rhs) == BIT_IOR_EXPR
8567 : integer_zero_node)),
8569 do_pending_stack_adjust ();
8574 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8575 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8577 = save_noncopied_parts (stabilize_reference (lhs),
8578 TYPE_NONCOPIED_PARTS (lhs_type));
8580 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8581 while (noncopied_parts != 0)
8583 expand_assignment (TREE_PURPOSE (noncopied_parts),
8584 TREE_VALUE (noncopied_parts), 0, 0);
8585 noncopied_parts = TREE_CHAIN (noncopied_parts);
8591 if (!TREE_OPERAND (exp, 0))
8592 expand_null_return ();
8594 expand_return (TREE_OPERAND (exp, 0));
8597 case PREINCREMENT_EXPR:
8598 case PREDECREMENT_EXPR:
8599 return expand_increment (exp, 0, ignore);
8601 case POSTINCREMENT_EXPR:
8602 case POSTDECREMENT_EXPR:
8603 /* Faster to treat as pre-increment if result is not used. */
8604 return expand_increment (exp, ! ignore, ignore);
8607 /* If nonzero, TEMP will be set to the address of something that might
8608 be a MEM corresponding to a stack slot. */
8611 /* Are we taking the address of a nested function? */
8612 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8613 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8614 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8615 && ! TREE_STATIC (exp))
8617 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8618 op0 = force_operand (op0, target);
8620 /* If we are taking the address of something erroneous, just
8622 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8626 /* We make sure to pass const0_rtx down if we came in with
8627 ignore set, to avoid doing the cleanups twice for something. */
8628 op0 = expand_expr (TREE_OPERAND (exp, 0),
8629 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8630 (modifier == EXPAND_INITIALIZER
8631 ? modifier : EXPAND_CONST_ADDRESS));
8633 /* If we are going to ignore the result, OP0 will have been set
8634 to const0_rtx, so just return it. Don't get confused and
8635 think we are taking the address of the constant. */
8639 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8640 clever and returns a REG when given a MEM. */
8641 op0 = protect_from_queue (op0, 1);
8643 /* We would like the object in memory. If it is a constant, we can
8644 have it be statically allocated into memory. For a non-constant,
8645 we need to allocate some memory and store the value into it. */
8647 if (CONSTANT_P (op0))
8648 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8650 else if (GET_CODE (op0) == MEM)
8652 mark_temp_addr_taken (op0);
8653 temp = XEXP (op0, 0);
8656 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8657 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8658 || GET_CODE (op0) == PARALLEL)
8660 /* If this object is in a register, it must be not
8662 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8663 tree nt = build_qualified_type (inner_type,
8664 (TYPE_QUALS (inner_type)
8665 | TYPE_QUAL_CONST));
8666 rtx memloc = assign_temp (nt, 1, 1, 1);
8668 mark_temp_addr_taken (memloc);
8669 if (GET_CODE (op0) == PARALLEL)
8670 /* Handle calls that pass values in multiple non-contiguous
8671 locations. The Irix 6 ABI has examples of this. */
8672 emit_group_store (memloc, op0,
8673 int_size_in_bytes (inner_type),
8674 TYPE_ALIGN (inner_type));
8676 emit_move_insn (memloc, op0);
8680 if (GET_CODE (op0) != MEM)
8683 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8685 temp = XEXP (op0, 0);
8686 #ifdef POINTERS_EXTEND_UNSIGNED
8687 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8688 && mode == ptr_mode)
8689 temp = convert_memory_address (ptr_mode, temp);
8694 op0 = force_operand (XEXP (op0, 0), target);
8697 if (flag_force_addr && GET_CODE (op0) != REG)
8698 op0 = force_reg (Pmode, op0);
8700 if (GET_CODE (op0) == REG
8701 && ! REG_USERVAR_P (op0))
8702 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8704 /* If we might have had a temp slot, add an equivalent address
8707 update_temp_slot_address (temp, op0);
8709 #ifdef POINTERS_EXTEND_UNSIGNED
8710 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8711 && mode == ptr_mode)
8712 op0 = convert_memory_address (ptr_mode, op0);
8717 case ENTRY_VALUE_EXPR:
8720 /* COMPLEX type for Extended Pascal & Fortran */
8723 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8726 /* Get the rtx code of the operands. */
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8728 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8731 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8735 /* Move the real (op0) and imaginary (op1) parts to their location. */
8736 emit_move_insn (gen_realpart (mode, target), op0);
8737 emit_move_insn (gen_imagpart (mode, target), op1);
8739 insns = get_insns ();
8742 /* Complex construction should appear as a single unit. */
8743 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8744 each with a separate pseudo as destination.
8745 It's not correct for flow to treat them as a unit. */
8746 if (GET_CODE (target) != CONCAT)
8747 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8755 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8756 return gen_realpart (mode, op0);
8759 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8760 return gen_imagpart (mode, op0);
8764 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8768 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8771 target = gen_reg_rtx (mode);
8775 /* Store the realpart and the negated imagpart to target. */
8776 emit_move_insn (gen_realpart (partmode, target),
8777 gen_realpart (partmode, op0));
8779 imag_t = gen_imagpart (partmode, target);
8780 temp = expand_unop (partmode,
8781 ! unsignedp && flag_trapv
8782 && (GET_MODE_CLASS(partmode) == MODE_INT)
8783 ? negv_optab : neg_optab,
8784 gen_imagpart (partmode, op0), imag_t, 0);
8786 emit_move_insn (imag_t, temp);
8788 insns = get_insns ();
8791 /* Conjugate should appear as a single unit
8792 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8793 each with a separate pseudo as destination.
8794 It's not correct for flow to treat them as a unit. */
8795 if (GET_CODE (target) != CONCAT)
8796 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8803 case TRY_CATCH_EXPR:
8805 tree handler = TREE_OPERAND (exp, 1);
8807 expand_eh_region_start ();
8809 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8811 expand_eh_region_end_cleanup (handler);
8816 case TRY_FINALLY_EXPR:
8818 tree try_block = TREE_OPERAND (exp, 0);
8819 tree finally_block = TREE_OPERAND (exp, 1);
8820 rtx finally_label = gen_label_rtx ();
8821 rtx done_label = gen_label_rtx ();
8822 rtx return_link = gen_reg_rtx (Pmode);
8823 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8824 (tree) finally_label, (tree) return_link);
8825 TREE_SIDE_EFFECTS (cleanup) = 1;
8827 /* Start a new binding layer that will keep track of all cleanup
8828 actions to be performed. */
8829 expand_start_bindings (2);
8831 target_temp_slot_level = temp_slot_level;
8833 expand_decl_cleanup (NULL_TREE, cleanup);
8834 op0 = expand_expr (try_block, target, tmode, modifier);
8836 preserve_temp_slots (op0);
8837 expand_end_bindings (NULL_TREE, 0, 0);
8838 emit_jump (done_label);
8839 emit_label (finally_label);
8840 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8841 emit_indirect_jump (return_link);
8842 emit_label (done_label);
8846 case GOTO_SUBROUTINE_EXPR:
8848 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8849 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8850 rtx return_address = gen_label_rtx ();
8851 emit_move_insn (return_link,
8852 gen_rtx_LABEL_REF (Pmode, return_address));
8854 emit_label (return_address);
8859 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8862 return get_exception_pointer (cfun);
8865 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8868 /* Here to do an ordinary binary operator, generating an instruction
8869 from the optab already placed in `this_optab'. */
8871 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8874 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8876 temp = expand_binop (mode, this_optab, op0, op1, target,
8877 unsignedp, OPTAB_LIB_WIDEN);
8883 /* Similar to expand_expr, except that we don't specify a target, target
8884 mode, or modifier and we return the alignment of the inner type. This is
8885 used in cases where it is not necessary to align the result to the
8886 alignment of its type as long as we know the alignment of the result, for
8887 example for comparisons of BLKmode values. */
8890 expand_expr_unaligned (exp, palign)
8892 unsigned int *palign;
8895 tree type = TREE_TYPE (exp);
8896 register enum machine_mode mode = TYPE_MODE (type);
8898 /* Default the alignment we return to that of the type. */
8899 *palign = TYPE_ALIGN (type);
8901 /* The only cases in which we do anything special is if the resulting mode
8903 if (mode != BLKmode)
8904 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8906 switch (TREE_CODE (exp))
8910 case NON_LVALUE_EXPR:
8911 /* Conversions between BLKmode values don't change the underlying
8912 alignment or value. */
8913 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8914 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8918 /* Much of the code for this case is copied directly from expand_expr.
8919 We need to duplicate it here because we will do something different
8920 in the fall-through case, so we need to handle the same exceptions
8923 tree array = TREE_OPERAND (exp, 0);
8924 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8925 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8926 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8929 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8932 /* Optimize the special-case of a zero lower bound.
8934 We convert the low_bound to sizetype to avoid some problems
8935 with constant folding. (E.g. suppose the lower bound is 1,
8936 and its mode is QI. Without the conversion, (ARRAY
8937 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8938 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8940 if (! integer_zerop (low_bound))
8941 index = size_diffop (index, convert (sizetype, low_bound));
8943 /* If this is a constant index into a constant array,
8944 just get the value from the array. Handle both the cases when
8945 we have an explicit constructor and when our operand is a variable
8946 that was declared const. */
8948 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8949 && host_integerp (index, 0)
8950 && 0 > compare_tree_int (index,
8951 list_length (CONSTRUCTOR_ELTS
8952 (TREE_OPERAND (exp, 0)))))
8956 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8957 i = tree_low_cst (index, 0);
8958 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8962 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8965 else if (optimize >= 1
8966 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8967 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8968 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8970 if (TREE_CODE (index) == INTEGER_CST)
8972 tree init = DECL_INITIAL (array);
8974 if (TREE_CODE (init) == CONSTRUCTOR)
8978 for (elem = CONSTRUCTOR_ELTS (init);
8979 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8980 elem = TREE_CHAIN (elem))
8984 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8994 case ARRAY_RANGE_REF:
8995 /* If the operand is a CONSTRUCTOR, we can just extract the
8996 appropriate field if it is present. Don't do this if we have
8997 already written the data since we want to refer to that copy
8998 and varasm.c assumes that's what we'll do. */
8999 if (TREE_CODE (exp) == COMPONENT_REF
9000 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9001 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9005 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9006 elt = TREE_CHAIN (elt))
9007 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9008 /* Note that unlike the case in expand_expr, we know this is
9009 BLKmode and hence not an integer. */
9010 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9014 enum machine_mode mode1;
9015 HOST_WIDE_INT bitsize, bitpos;
9018 unsigned int alignment;
9020 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9021 &mode1, &unsignedp, &volatilep,
9024 /* If we got back the original object, something is wrong. Perhaps
9025 we are evaluating an expression too early. In any event, don't
9026 infinitely recurse. */
9030 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9032 /* If this is a constant, put it into a register if it is a
9033 legitimate constant and OFFSET is 0 and memory if it isn't. */
9034 if (CONSTANT_P (op0))
9036 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9038 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9040 op0 = force_reg (inner_mode, op0);
9042 op0 = validize_mem (force_const_mem (inner_mode, op0));
9047 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9049 /* If this object is in a register, put it into memory.
9050 This case can't occur in C, but can in Ada if we have
9051 unchecked conversion of an expression from a scalar type to
9052 an array or record type. */
9053 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9054 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9056 tree nt = build_qualified_type (TREE_TYPE (tem),
9057 (TYPE_QUALS (TREE_TYPE (tem))
9058 | TYPE_QUAL_CONST));
9059 rtx memloc = assign_temp (nt, 1, 1, 1);
9061 mark_temp_addr_taken (memloc);
9062 emit_move_insn (memloc, op0);
9066 if (GET_CODE (op0) != MEM)
9069 if (GET_MODE (offset_rtx) != ptr_mode)
9071 #ifdef POINTERS_EXTEND_UNSIGNED
9072 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9074 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9078 op0 = change_address (op0, VOIDmode,
9079 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9080 force_reg (ptr_mode,
9084 /* Don't forget about volatility even if this is a bitfield. */
9085 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9087 op0 = copy_rtx (op0);
9088 MEM_VOLATILE_P (op0) = 1;
9091 /* Check the access. */
9092 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9097 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9098 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9100 /* Check the access right of the pointer. */
9101 in_check_memory_usage = 1;
9102 if (size > BITS_PER_UNIT)
9103 emit_library_call (chkr_check_addr_libfunc,
9104 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9105 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9106 TYPE_MODE (sizetype),
9107 GEN_INT (MEMORY_USE_RO),
9108 TYPE_MODE (integer_type_node));
9109 in_check_memory_usage = 0;
9112 /* In cases where an aligned union has an unaligned object
9113 as a field, we might be extracting a BLKmode value from
9114 an integer-mode (e.g., SImode) object. Handle this case
9115 by doing the extract into an object as wide as the field
9116 (which we know to be the width of a basic mode), then
9117 storing into memory, and changing the mode to BLKmode.
9118 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9119 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9120 if (mode1 == VOIDmode
9121 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9122 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9123 && (TYPE_ALIGN (type) > alignment
9124 || bitpos % TYPE_ALIGN (type) != 0)))
9126 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9128 if (ext_mode == BLKmode)
9130 /* In this case, BITPOS must start at a byte boundary. */
9131 if (GET_CODE (op0) != MEM
9132 || bitpos % BITS_PER_UNIT != 0)
9135 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9139 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9141 rtx new = assign_temp (nt, 0, 1, 1);
9143 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9144 unsignedp, NULL_RTX, ext_mode,
9145 ext_mode, alignment,
9146 int_size_in_bytes (TREE_TYPE (tem)));
9148 /* If the result is a record type and BITSIZE is narrower than
9149 the mode of OP0, an integral mode, and this is a big endian
9150 machine, we must put the field into the high-order bits. */
9151 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9152 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9153 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9154 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9155 size_int (GET_MODE_BITSIZE
9160 emit_move_insn (new, op0);
9161 op0 = copy_rtx (new);
9162 PUT_MODE (op0, BLKmode);
9166 /* Get a reference to just this component. */
9167 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9169 set_mem_alias_set (op0, get_alias_set (exp));
9171 /* Adjust the alignment in case the bit position is not
9172 a multiple of the alignment of the inner object. */
9173 while (bitpos % alignment != 0)
9176 if (GET_CODE (XEXP (op0, 0)) == REG)
9177 mark_reg_pointer (XEXP (op0, 0), alignment);
9179 MEM_IN_STRUCT_P (op0) = 1;
9180 MEM_VOLATILE_P (op0) |= volatilep;
9182 *palign = alignment;
9191 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9194 /* Return the tree node if a ARG corresponds to a string constant or zero
9195 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9196 in bytes within the string that ARG is accessing. The type of the
9197 offset will be `sizetype'. */
9200 string_constant (arg, ptr_offset)
     /* Direct case: ARG is `&"..."'; the string starts at offset zero.  */
9206 if (TREE_CODE (arg) == ADDR_EXPR
9207 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9209 *ptr_offset = size_zero_node;
9210 return TREE_OPERAND (arg, 0);
     /* Offset case: ARG is `&"..." + offset' with the ADDR_EXPR in either
	operand of the PLUS_EXPR; the other operand becomes the byte offset
	(converted to sizetype, per the function comment above).  */
9212 else if (TREE_CODE (arg) == PLUS_EXPR)
9214 tree arg0 = TREE_OPERAND (arg, 0);
9215 tree arg1 = TREE_OPERAND (arg, 1);
9220 if (TREE_CODE (arg0) == ADDR_EXPR
9221 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9223 *ptr_offset = convert (sizetype, arg1);
9224 return TREE_OPERAND (arg0, 0);
9226 else if (TREE_CODE (arg1) == ADDR_EXPR
9227 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9229 *ptr_offset = convert (sizetype, arg0);
9230 return TREE_OPERAND (arg1, 0);
     /* NOTE(review): the fall-through `return 0' for non-string ARG lies in
	lines elided from this excerpt — confirm against the full file.  */
9237 /* Expand code for a post- or pre- increment or decrement
9238 and return the RTX for the result.
9239 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9242 expand_increment (exp, post, ignore)
9246 register rtx op0, op1;
9247 register rtx temp, value;
9248 register tree incremented = TREE_OPERAND (exp, 0);
9249 optab this_optab = add_optab;
9251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9252 int op0_is_copy = 0;
9253 int single_insn = 0;
9254 /* 1 means we can't store into OP0 directly,
9255 because it is a subreg narrower than a word,
9256 and we don't dare clobber the rest of the word. */
9259 /* Stabilize any component ref that might need to be
9260 evaluated more than once below. */
     /* NOTE(review): the first arm of this condition (and the guard on
	`! post') is in lines elided from this excerpt.  */
9262 || TREE_CODE (incremented) == BIT_FIELD_REF
9263 || (TREE_CODE (incremented) == COMPONENT_REF
9264 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9265 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9266 incremented = stabilize_reference (incremented);
9267 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9268 ones into save exprs so that they don't accidentally get evaluated
9269 more than once by the code below. */
9270 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9271 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9272 incremented = save_expr (incremented);
9274 /* Compute the operands as RTX.
9275 Note whether OP0 is the actual lvalue or a copy of it:
9276 I believe it is a copy iff it is a register or subreg
9277 and insns were generated in computing it. */
9279 temp = get_last_insn ();
9280 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9282 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9283 in place but instead must do sign- or zero-extension during assignment,
9284 so we copy it into a new register and let the code below use it as
9287 Note that we can safely modify this SUBREG since it is know not to be
9288 shared (it was made by the expand_expr call above). */
9290 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9293 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9297 else if (GET_CODE (op0) == SUBREG
9298 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9300 /* We cannot increment this SUBREG in place. If we are
9301 post-incrementing, get a copy of the old value. Otherwise,
9302 just mark that we cannot increment in place. */
9304 op0 = copy_to_reg (op0);
     /* OP0_IS_COPY: expand_expr returned a (sub)register and emitted insns
	to compute it, so OP0 is a copy rather than the lvalue itself.  */
9309 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9310 && temp != get_last_insn ());
9311 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9312 EXPAND_MEMORY_USE_BAD);
9314 /* Decide whether incrementing or decrementing. */
9315 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9316 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9317 this_optab = sub_optab;
9319 /* Convert decrement by a constant into a negative increment. */
9320 if (this_optab == sub_optab
9321 && GET_CODE (op1) == CONST_INT)
9323 op1 = GEN_INT (-INTVAL (op1));
9324 this_optab = add_optab;
     /* Trapping signed arithmetic requires the overflow-checking optabs.  */
9327 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9328 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9330 /* For a preincrement, see if we can do this with a single instruction. */
9333 icode = (int) this_optab->handlers[(int) mode].insn_code;
9334 if (icode != (int) CODE_FOR_nothing
9335 /* Make sure that OP0 is valid for operands 0 and 1
9336 of the insn we want to queue. */
9337 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9338 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9339 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9343 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9344 then we cannot just increment OP0. We must therefore contrive to
9345 increment the original value. Then, for postincrement, we can return
9346 OP0 since it is a copy of the old value. For preincrement, expand here
9347 unless we can do it with a single insn.
9349 Likewise if storing directly into OP0 would clobber high bits
9350 we need to preserve (bad_subreg). */
9351 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9353 /* This is the easiest way to increment the value wherever it is.
9354 Problems with multiple evaluation of INCREMENTED are prevented
9355 because either (1) it is a component_ref or preincrement,
9356 in which case it was stabilized above, or (2) it is an array_ref
9357 with constant index in an array in a register, which is
9358 safe to reevaluate. */
9359 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9360 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9361 ? MINUS_EXPR : PLUS_EXPR),
9364 TREE_OPERAND (exp, 1));
     /* Strip conversions so the assignment is done in the inner type,
	re-wrapping NEWEXP in the corresponding conversion each step.  */
9366 while (TREE_CODE (incremented) == NOP_EXPR
9367 || TREE_CODE (incremented) == CONVERT_EXPR)
9369 newexp = convert (TREE_TYPE (incremented), newexp);
9370 incremented = TREE_OPERAND (incremented, 0);
9373 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9374 return post ? op0 : temp;
9379 /* We have a true reference to the value in OP0.
9380 If there is an insn to add or subtract in this mode, queue it.
9381 Queueing the increment insn avoids the register shuffling
9382 that often results if we must increment now and first save
9383 the old value for subsequent use. */
9385 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9386 op0 = stabilize (op0);
9389 icode = (int) this_optab->handlers[(int) mode].insn_code;
9390 if (icode != (int) CODE_FOR_nothing
9391 /* Make sure that OP0 is valid for operands 0 and 1
9392 of the insn we want to queue. */
9393 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9394 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9396 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9397 op1 = force_reg (mode, op1)
9399 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
     /* Memory operand: copy to a register, increment there, and queue the
	store-back.  */
9401 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9403 rtx addr = (general_operand (XEXP (op0, 0), mode)
9404 ? force_reg (Pmode, XEXP (op0, 0))
9405 : copy_to_reg (XEXP (op0, 0)));
9408 op0 = replace_equiv_address (op0, addr);
9409 temp = force_reg (GET_MODE (op0), op0);
9410 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9411 op1 = force_reg (mode, op1);
9413 /* The increment queue is LIFO, thus we have to `queue'
9414 the instructions in reverse order. */
9415 enqueue_insn (op0, gen_move_insn (op0, temp));
9416 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9421 /* Preincrement, or we can't increment with one simple insn. */
9423 /* Save a copy of the value before inc or dec, to return it later. */
9424 temp = value = copy_to_reg (op0);
9426 /* Arrange to return the incremented value. */
9427 /* Copy the rtx because expand_binop will protect from the queue,
9428 and the results of that would be invalid for us to return
9429 if our caller does emit_queue before using our result. */
9430 temp = copy_rtx (value = op0);
9432 /* Increment however we can. */
9433 op1 = expand_binop (mode, this_optab, value, op1,
9434 current_function_check_memory_usage ? NULL_RTX : op0,
9435 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9436 /* Make sure the value is stored into OP0. */
9438 emit_move_insn (op0, op1);
9443 /* At the start of a function, record that we have no previously-pushed
9444 arguments waiting to be popped. */
9447 init_pending_stack_adjust ()
     /* Reset the global count of pushed-but-unpopped argument bytes.  */
9449 pending_stack_adjust = 0;
9452 /* When exiting from function, if safe, clear out any pending stack adjust
9453 so the adjustment won't get done.
9455 Note, if the current function calls alloca, then it must have a
9456 frame pointer regardless of the value of flag_omit_frame_pointer. */
9459 clear_pending_stack_adjust ()
9461 #ifdef EXIT_IGNORE_STACK
     /* NOTE(review): the leading term of this condition (line 9462) is
	elided from this excerpt; the visible terms require the exit code
	to ignore the stack pointer and the function not to be inlined.  */
9463 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9464 && EXIT_IGNORE_STACK
9465 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9466 && ! flag_inline_functions)
     /* Discard the adjustment: account for it in stack_pointer_delta and
	zero the pending count so no pop is ever emitted.  */
9468 stack_pointer_delta -= pending_stack_adjust,
9469 pending_stack_adjust = 0;
9474 /* Pop any previously-pushed arguments that have not been popped yet. */
9477 do_pending_stack_adjust ()
     /* Only emit the adjustment when pops are not being deferred.  */
9479 if (inhibit_defer_pop == 0)
9481 if (pending_stack_adjust != 0)
9482 adjust_stack (GEN_INT (pending_stack_adjust));
9483 pending_stack_adjust = 0;
9487 /* Expand conditional expressions. */
9489 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9490 LABEL is an rtx of code CODE_LABEL, in this function and all the
9494 jumpifnot (exp, label)
     /* LABEL is the if-false target; no if-true label, so fall through
	when EXP is nonzero.  */
9498 do_jump (exp, label, NULL_RTX);
9501 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
     /* LABEL is the if-true target; no if-false label, so fall through
	when EXP is zero.  (Function header is in lines elided from this
	excerpt.)  */
9508 do_jump (exp, NULL_RTX, label);
9511 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9512 the result is zero, or IF_TRUE_LABEL if the result is one.
9513 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9514 meaning fall through in that case.
9516 do_jump always does any pending stack adjust except when it does not
9517 actually perform a jump. An example where there is no jump
9518 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9520 This function is responsible for optimizing cases such as
9521 &&, || and comparison operators in EXP. */
9524 do_jump (exp, if_false_label, if_true_label)
9526 rtx if_false_label, if_true_label;
9528 register enum tree_code code = TREE_CODE (exp);
9529 /* Some cases need to create a label to jump to
9530 in order to properly fall through.
9531 These cases set DROP_THROUGH_LABEL nonzero. */
9532 rtx drop_through_label = 0;
9536 enum machine_mode mode;
9538 #ifdef MAX_INTEGER_COMPUTATION_MODE
9539 check_max_integer_computation_mode (exp);
     /* NOTE(review): the switch statement and many of its case labels fall
	in lines elided from this excerpt; the comments below identify the
	cases only where the visible code makes them unambiguous.  */
     /* Constant operand: jump unconditionally to whichever label matches.  */
9550 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9556 /* This is not true with #pragma weak */
9558 /* The address of something can never be zero. */
9560 emit_jump (if_true_label);
9565 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9566 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9567 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9568 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9571 /* If we are narrowing the operand, we have to do the compare in the
9573 if ((TYPE_PRECISION (TREE_TYPE (exp))
9574 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9576 case NON_LVALUE_EXPR:
9577 case REFERENCE_EXPR:
9582 /* These cannot change zero->non-zero or vice versa. */
9583 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9586 case WITH_RECORD_EXPR:
9587 /* Put the object on the placeholder list, recurse through our first
9588 operand, and pop the list. */
9589 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9591 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9592 placeholder_list = TREE_CHAIN (placeholder_list);
9596 /* This is never less insns than evaluating the PLUS_EXPR followed by
9597 a test and can be longer if the test is eliminated. */
9599 /* Reduce to minus. */
9600 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9601 TREE_OPERAND (exp, 0),
9602 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9603 TREE_OPERAND (exp, 1))));
9604 /* Process as MINUS. */
9608 /* Non-zero iff operands of minus differ. */
9609 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9610 TREE_OPERAND (exp, 0),
9611 TREE_OPERAND (exp, 1)),
9612 NE, NE, if_false_label, if_true_label);
9616 /* If we are AND'ing with a small constant, do this comparison in the
9617 smallest type that fits. If the machine doesn't have comparisons
9618 that small, it will be converted back to the wider comparison.
9619 This helps if we are testing the sign bit of a narrower object.
9620 combine can't do this for us because it can't know whether a
9621 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9623 if (! SLOW_BYTE_ACCESS
9624 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9625 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9626 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9627 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9628 && (type = type_for_mode (mode, 1)) != 0
9629 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9630 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9631 != CODE_FOR_nothing))
9633 do_jump (convert (type, exp), if_false_label, if_true_label);
9638 case TRUTH_NOT_EXPR:
     /* Logical NOT: recurse with the labels swapped.  */
9639 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9642 case TRUTH_ANDIF_EXPR:
     /* Short-circuit AND: first operand false jumps straight out.  */
9643 if (if_false_label == 0)
9644 if_false_label = drop_through_label = gen_label_rtx ();
9645 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9646 start_cleanup_deferral ();
9647 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9648 end_cleanup_deferral ();
9651 case TRUTH_ORIF_EXPR:
     /* Short-circuit OR: first operand true jumps straight out.  */
9652 if (if_true_label == 0)
9653 if_true_label = drop_through_label = gen_label_rtx ();
9654 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9655 start_cleanup_deferral ();
9656 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9657 end_cleanup_deferral ();
     /* Comma expression: evaluate the first operand for side effects only,
	then jump on the second.  */
9662 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9663 preserve_temp_slots (NULL_RTX);
9667 do_pending_stack_adjust ();
9668 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9674 case ARRAY_RANGE_REF:
9676 HOST_WIDE_INT bitsize, bitpos;
9678 enum machine_mode mode;
9682 unsigned int alignment;
9684 /* Get description of this reference. We don't actually care
9685 about the underlying object here. */
9686 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9687 &unsignedp, &volatilep, &alignment);
9689 type = type_for_size (bitsize, unsignedp);
9690 if (! SLOW_BYTE_ACCESS
9691 && type != 0 && bitsize >= 0
9692 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9693 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9694 != CODE_FOR_nothing))
9696 do_jump (convert (type, exp), if_false_label, if_true_label);
9703 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9704 if (integer_onep (TREE_OPERAND (exp, 1))
9705 && integer_zerop (TREE_OPERAND (exp, 2)))
9706 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9708 else if (integer_zerop (TREE_OPERAND (exp, 1))
9709 && integer_onep (TREE_OPERAND (exp, 2)))
9710 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
     /* General conditional: jump on the condition, then emit both arms.  */
9714 register rtx label1 = gen_label_rtx ();
9715 drop_through_label = gen_label_rtx ();
9717 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9719 start_cleanup_deferral ();
9720 /* Now the THEN-expression. */
9721 do_jump (TREE_OPERAND (exp, 1),
9722 if_false_label ? if_false_label : drop_through_label,
9723 if_true_label ? if_true_label : drop_through_label);
9724 /* In case the do_jump just above never jumps. */
9725 do_pending_stack_adjust ();
9726 emit_label (label1);
9728 /* Now the ELSE-expression. */
9729 do_jump (TREE_OPERAND (exp, 2),
9730 if_false_label ? if_false_label : drop_through_label,
9731 if_true_label ? if_true_label : drop_through_label);
9732 end_cleanup_deferral ();
     /* Equality comparison: complex values compare real and imaginary
	parts with TRUTH_ANDIF; wide integers go word-by-word.  */
9738 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9740 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9741 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9743 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9744 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9747 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9748 fold (build (EQ_EXPR, TREE_TYPE (exp),
9749 fold (build1 (REALPART_EXPR,
9750 TREE_TYPE (inner_type),
9752 fold (build1 (REALPART_EXPR,
9753 TREE_TYPE (inner_type),
9755 fold (build (EQ_EXPR, TREE_TYPE (exp),
9756 fold (build1 (IMAGPART_EXPR,
9757 TREE_TYPE (inner_type),
9759 fold (build1 (IMAGPART_EXPR,
9760 TREE_TYPE (inner_type),
9762 if_false_label, if_true_label);
9765 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9766 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9768 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9769 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9770 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9772 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
     /* Inequality comparison: mirror of the EQ case above, with TRUTH_ORIF
	over the real/imaginary parts and the label roles adjusted.  */
9778 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9780 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9781 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9783 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9784 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9787 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9788 fold (build (NE_EXPR, TREE_TYPE (exp),
9789 fold (build1 (REALPART_EXPR,
9790 TREE_TYPE (inner_type),
9792 fold (build1 (REALPART_EXPR,
9793 TREE_TYPE (inner_type),
9795 fold (build (NE_EXPR, TREE_TYPE (exp),
9796 fold (build1 (IMAGPART_EXPR,
9797 TREE_TYPE (inner_type),
9799 fold (build1 (IMAGPART_EXPR,
9800 TREE_TYPE (inner_type),
9802 if_false_label, if_true_label);
9805 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9806 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9808 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9809 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9810 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9812 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
     /* Ordering comparisons (LT/LE/GT/GE): use word-by-word comparison when
	the mode has no direct compare insn, else signed/unsigned rtx codes.  */
9817 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9818 if (GET_MODE_CLASS (mode) == MODE_INT
9819 && ! can_compare_p (LT, mode, ccp_jump))
9820 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9822 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9826 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9827 if (GET_MODE_CLASS (mode) == MODE_INT
9828 && ! can_compare_p (LE, mode, ccp_jump))
9829 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9831 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9835 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9836 if (GET_MODE_CLASS (mode) == MODE_INT
9837 && ! can_compare_p (GT, mode, ccp_jump))
9838 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9840 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9844 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845 if (GET_MODE_CLASS (mode) == MODE_INT
9846 && ! can_compare_p (GE, mode, ccp_jump))
9847 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9849 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9852 case UNORDERED_EXPR:
9855 enum rtx_code cmp, rcmp;
9858 if (code == UNORDERED_EXPR)
9859 cmp = UNORDERED, rcmp = ORDERED;
9861 cmp = ORDERED, rcmp = UNORDERED;
9862 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
     /* Prefer the direct comparison; fall back to the reversed one (with
	labels swapped) if only that is supported.  */
9865 if (! can_compare_p (cmp, mode, ccp_jump)
9866 && (can_compare_p (rcmp, mode, ccp_jump)
9867 /* If the target doesn't provide either UNORDERED or ORDERED
9868 comparisons, canonicalize on UNORDERED for the library. */
9869 || rcmp == UNORDERED))
9873 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9875 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
     /* Combined unordered comparisons (UNLT etc. — case labels elided).  */
9880 enum rtx_code rcode1;
9881 enum tree_code tcode2;
9905 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9906 if (can_compare_p (rcode1, mode, ccp_jump))
9907 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9911 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9912 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9915 /* If the target doesn't support combined unordered
9916 compares, decompose into UNORDERED + comparison. */
9917 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9918 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9919 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9920 do_jump (exp, if_false_label, if_true_label);
9926 __builtin_expect (<test>, 0) and
9927 __builtin_expect (<test>, 1)
9929 We need to do this here, so that <test> is not converted to a SCC
9930 operation on machines that use condition code registers and COMPARE
9931 like the PowerPC, and then the jump is done based on whether the SCC
9932 operation produced a 1 or 0. */
9934 /* Check for a built-in function. */
9935 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9937 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9938 tree arglist = TREE_OPERAND (exp, 1);
9940 if (TREE_CODE (fndecl) == FUNCTION_DECL
9941 && DECL_BUILT_IN (fndecl)
9942 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9943 && arglist != NULL_TREE
9944 && TREE_CHAIN (arglist) != NULL_TREE)
9946 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9949 if (seq != NULL_RTX)
9956 /* fall through and generate the normal code. */
     /* Default: evaluate EXP and compare the result against zero.  */
9960 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9962 /* This is not needed any more and causes poor code since it causes
9963 comparisons and tests from non-SI objects to have different code
9965 /* Copy to register to avoid generating bad insns by cse
9966 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9967 if (!cse_not_expected && GET_CODE (temp) == MEM)
9968 temp = copy_to_reg (temp);
9970 do_pending_stack_adjust ();
9971 /* Do any postincrements in the expression that was tested. */
9974 if (GET_CODE (temp) == CONST_INT
9975 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9976 || GET_CODE (temp) == LABEL_REF)
9978 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9982 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9983 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9984 /* Note swapping the labels gives us not-equal. */
9985 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9986 else if (GET_MODE (temp) != VOIDmode)
9987 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9988 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9989 GET_MODE (temp), NULL_RTX, 0,
9990 if_false_label, if_true_label);
9995 if (drop_through_label)
9997 /* If do_jump produces code that might be jumped around,
9998 do any stack adjusts from that code, before the place
9999 where control merges in. */
10000 do_pending_stack_adjust ();
10001 emit_label (drop_through_label);
10005 /* Given a comparison expression EXP for values too wide to be compared
10006 with one insn, test the comparison and jump to the appropriate label.
10007 The code of EXP is ignored; we always test GT if SWAP is 0,
10008 and LT if SWAP is 1. */
10011 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10014 rtx if_false_label, if_true_label;
      /* SWAP selects operand order: 0 expands op0/op1 to test GT, 1 expands
	 them swapped so the same GT machinery tests LT.  */
10016 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10017 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10018 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10019 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10021 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10024 /* Compare OP0 with OP1, word at a time, in mode MODE.
10025 UNSIGNEDP says to do unsigned comparison.
10026 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10029 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10030 enum machine_mode mode;
10033 rtx if_false_label, if_true_label;
10035 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10036 rtx drop_through_label = 0;
      /* Supply a fall-through label for whichever target is missing.  */
10039 if (! if_true_label || ! if_false_label)
10040 drop_through_label = gen_label_rtx ();
10041 if (! if_true_label)
10042 if_true_label = drop_through_label;
10043 if (! if_false_label)
10044 if_false_label = drop_through_label;
10046 /* Compare a word at a time, high order first. */
10047 for (i = 0; i < nwords; i++)
10049 rtx op0_word, op1_word;
      /* Pick subwords so iteration always proceeds from the most
	 significant word downward, regardless of word endianness.  */
10051 if (WORDS_BIG_ENDIAN)
10053 op0_word = operand_subword_force (op0, i, mode);
10054 op1_word = operand_subword_force (op1, i, mode);
10058 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10059 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10062 /* All but high-order word must be compared as unsigned. */
10063 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10064 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10065 NULL_RTX, if_true_label);
10067 /* Consider lower words only if these are equal. */
10068 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10069 NULL_RTX, 0, NULL_RTX, if_false_label);
      /* All words equal: OP0 is not greater than OP1.  */
10072 if (if_false_label)
10073 emit_jump (if_false_label);
10074 if (drop_through_label)
10075 emit_label (drop_through_label);
10078 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10079 with one insn, test the comparison and jump to the appropriate label. */
10082 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10084 rtx if_false_label, if_true_label;
10086 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10087 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10089 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10091 rtx drop_through_label = 0;
      /* With no false label, inequality simply falls through.  */
10093 if (! if_false_label)
10094 drop_through_label = if_false_label = gen_label_rtx ();
      /* Any unequal word proves inequality; only if every word compares
	 equal do we reach the jump to IF_TRUE_LABEL below.  */
10096 for (i = 0; i < nwords; i++)
10097 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10098 operand_subword_force (op1, i, mode),
10099 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10100 word_mode, NULL_RTX, 0, if_false_label,
10104 emit_jump (if_true_label);
10105 if (drop_through_label)
10106 emit_label (drop_through_label);
10109 /* Jump according to whether OP0 is 0.
10110 We assume that OP0 has an integer mode that is too wide
10111 for the available compare insns. */
10114 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10116 rtx if_false_label, if_true_label;
10118 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10121 rtx drop_through_label = 0;
10123 /* The fastest way of doing this comparison on almost any machine is to
10124 "or" all the words and compare the result. If all have to be loaded
10125 from memory and this is a very wide item, it's possible this may
10126 be slower, but that's highly unlikely. */
10128 part = gen_reg_rtx (word_mode);
10129 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
      /* expand_binop may return 0 on failure; the loop stops then and the
	 fallback word-by-word path below is used instead.  */
10130 for (i = 1; i < nwords && part != 0; i++)
10131 part = expand_binop (word_mode, ior_optab, part,
10132 operand_subword_force (op0, i, GET_MODE (op0)),
10133 part, 1, OPTAB_WIDEN);
10137 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10138 NULL_RTX, 0, if_false_label, if_true_label);
10143 /* If we couldn't do the "or" simply, do this with a series of compares. */
10144 if (! if_false_label)
10145 drop_through_label = if_false_label = gen_label_rtx ();
      /* Any nonzero word means OP0 is nonzero -> jump to the false label.  */
10147 for (i = 0; i < nwords; i++)
10148 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10149 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10150 if_false_label, NULL_RTX);
10153 emit_jump (if_true_label);
10155 if (drop_through_label)
10156 emit_label (drop_through_label);
10159 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10160 (including code to compute the values to be compared)
10161 and set (CC0) according to the result.
10162 The decision as to signed or unsigned comparison must be made by the caller.
10164 We force a stack adjustment unless there are currently
10165 things pushed on the stack that aren't yet used.
10167 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10170 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10171 size of MODE should be used. */
10174 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10175 register rtx op0, op1;
10176 enum rtx_code code;
10178 enum machine_mode mode;
10180 unsigned int align;
10184 /* If one operand is constant, make it the second one. Only do this
10185 if the other operand is not constant as well. */
10187 if (swap_commutative_operands_p (op0, op1))
      /* NOTE(review): the operand swap itself (lines 10188-10191) is elided
	 from this excerpt; only the condition-code swap is visible.  */
10192 code = swap_condition (code);
10195 if (flag_force_mem)
10197 op0 = force_not_mem (op0);
10198 op1 = force_not_mem (op1);
10201 do_pending_stack_adjust ();
      /* Constant-fold the comparison entirely when both operands are
	 CONST_INTs and simplification succeeds.  */
10203 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10204 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10208 /* There's no need to do this now that combine.c can eliminate lots of
10209 sign extensions. This can be less efficient in certain cases on other
10212 /* If this is a signed equality comparison, we can do it as an
10213 unsigned comparison since zero-extension is cheaper than sign
10214 extension and comparisons with zero are done as unsigned. This is
10215 the case even on machines that can do fast sign extension, since
10216 zero-extension is easier to combine with other operations than
10217 sign-extension is. If we are comparing against a constant, we must
10218 convert it to what it would look like unsigned. */
10219 if ((code == EQ || code == NE) && ! unsignedp
10220 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10222 if (GET_CODE (op1) == CONST_INT
10223 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10224 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10229 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
      /* Result: a comparison rtx against cc0, for use in a jump insn.  */
10231 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10234 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10235 The decision as to signed or unsigned comparison must be made by the caller.
10237 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10240 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10241 size of MODE should be used. */
10244 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10245 if_false_label, if_true_label)
10246 register rtx op0, op1;
10247 enum rtx_code code;
10249 enum machine_mode mode;
10251 unsigned int align;
10252 rtx if_false_label, if_true_label;
/* Set when we invent a true-label of our own (see end of function).  */
10255 int dummy_true_label = 0;
/* With no true-label we only jump on falsehood; reversing CODE lets
   us jump on truth instead and drop through otherwise.  Reversal is
   skipped for floating modes, where NaNs make it unsafe.  */
10257 /* Reverse the comparison if that is safe and we want to jump if it is
10259 if (! if_true_label && ! FLOAT_MODE_P (mode))
10261 if_true_label = if_false_label;
10262 if_false_label = 0;
10263 code = reverse_condition (code);
/* Canonicalize: constant operand second, CODE swapped to match.  */
10266 /* If one operand is constant, make it the second one. Only do this
10267 if the other operand is not constant as well. */
10269 if (swap_commutative_operands_p (op0, op1))
10274 code = swap_condition (code);
/* Under -fforce-mem, load memory operands into registers first.  */
10277 if (flag_force_mem)
10279 op0 = force_not_mem (op0);
10280 op1 = force_not_mem (op1);
10283 do_pending_stack_adjust ();
/* Constant-fold the comparison when both operands are CONST_INTs:
   emit an unconditional jump to whichever label the known result
   selects (or nothing, if that label is absent).  */
10285 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10286 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10288 if (tem == const_true_rtx)
10291 emit_jump (if_true_label);
10295 if (if_false_label)
10296 emit_jump (if_false_label);
10302 /* There's no need to do this now that combine.c can eliminate lots of
10303 sign extensions. This can be less efficient in certain cases on other
10306 /* If this is a signed equality comparison, we can do it as an
10307 unsigned comparison since zero-extension is cheaper than sign
10308 extension and comparisons with zero are done as unsigned. This is
10309 the case even on machines that can do fast sign extension, since
10310 zero-extension is easier to combine with other operations than
10311 sign-extension is. If we are comparing against a constant, we must
10312 convert it to what it would look like unsigned. */
10313 if ((code == EQ || code == NE) && ! unsignedp
10314 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant to OP0's mode width so the unsigned equality
   test sees the same bit pattern.  */
10316 if (GET_CODE (op1) == CONST_INT
10317 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10318 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* emit_cmp_and_jump_insns needs a label to branch to on truth, so
   create a placeholder when the caller supplied none; it is emitted
   just past the false-jump below so the code drops through.  */
10323 if (! if_true_label)
10325 dummy_true_label = 1;
10326 if_true_label = gen_label_rtx ();
10329 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
/* Falling through here means the condition was false.  */
10332 if (if_false_label)
10333 emit_jump (if_false_label);
10334 if (dummy_true_label)
10335 emit_label (if_true_label);
10338 /* Generate code for a comparison expression EXP (including code to compute
10339 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10340 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10341 generated code will drop through.
10342 SIGNED_CODE should be the rtx operation for this comparison for
10343 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10345 We force a stack adjustment unless there are currently
10346 things pushed on the stack that aren't yet used. */
10349 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10352 enum rtx_code signed_code, unsigned_code;
10353 rtx if_false_label, if_true_label;
10355 unsigned int align0, align1;
10356 register rtx op0, op1;
10357 register tree type;
10358 register enum machine_mode mode;
10360 enum rtx_code code;
/* Expand both operands first; bail out (without crashing) if either
   tree operand is an ERROR_MARK from an earlier parse error.  */
10362 /* Don't crash if the comparison was erroneous. */
10363 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10367 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10368 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
/* The comparison mode normally comes from operand 0's type ...  */
10371 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10372 mode = TYPE_MODE (type);
10373 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10374 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10375 || (GET_MODE_BITSIZE (mode)
10376 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10379 /* op0 might have been replaced by promoted constant, in which
10380 case the type of second argument should be used. */
10381 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10382 mode = TYPE_MODE (type);
/* Pick the signed or unsigned rtx comparison code based on TYPE.  */
10384 unsignedp = TREE_UNSIGNED (type);
10385 code = unsignedp ? unsigned_code : signed_code;
10387 #ifdef HAVE_canonicalize_funcptr_for_compare
10388 /* If function pointers need to be "canonicalized" before they can
10389 be reliably compared, then canonicalize them. */
10390 if (HAVE_canonicalize_funcptr_for_compare
10391 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10392 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10395 rtx new_op0 = gen_reg_rtx (mode);
10397 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
10401 if (HAVE_canonicalize_funcptr_for_compare
10402 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10403 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10406 rtx new_op1 = gen_reg_rtx (mode);
10408 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10413 /* Do any postincrements in the expression that was tested. */
/* Hand off to the rtx-level routine; alignment passed along is the
   weaker of the two operand alignments.  */
10416 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10418 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10419 MIN (align0, align1),
10420 if_false_label, if_true_label);
10423 /* Generate code to calculate EXP using a store-flag instruction
10424 and return an rtx for the result. EXP is either a comparison
10425 or a TRUTH_NOT_EXPR whose operand is a comparison.
10427 If TARGET is nonzero, store the result there if convenient.
10429 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10432 Return zero if there is no suitable set-flag instruction
10433 available on this machine.
10435 Once expand_expr has been called on the arguments of the comparison,
10436 we are committed to doing the store flag, since it is not safe to
10437 re-evaluate the expression. We emit the store-flag insn by calling
10438 emit_store_flag, but only expand the arguments if we have a reason
10439 to believe that emit_store_flag will be successful. If we think that
10440 it will, but it isn't, we have to simulate the store-flag with a
10441 set/jump/set sequence. */
10444 do_store_flag (exp, target, mode, only_cheap)
10447 enum machine_mode mode;
10450 enum rtx_code code;
10451 tree arg0, arg1, type;
10453 enum machine_mode operand_mode;
10457 enum insn_code icode;
10458 rtx subtarget = target;
10461 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10462 result at the end. We can't simply invert the test since it would
10463 have already been inverted if it were valid. This case occurs for
10464 some floating-point comparisons. */
10466 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10467 invert = 1, exp = TREE_OPERAND (exp, 0);
10469 arg0 = TREE_OPERAND (exp, 0);
10470 arg1 = TREE_OPERAND (exp, 1);
10472 /* Don't crash if the comparison was erroneous. */
10473 if (arg0 == error_mark_node || arg1 == error_mark_node)
/* The comparison is performed in the mode of the first operand.  */
10476 type = TREE_TYPE (arg0);
10477 operand_mode = TYPE_MODE (type);
10478 unsignedp = TREE_UNSIGNED (type);
10480 /* We won't bother with BLKmode store-flag operations because it would mean
10481 passing a lot of information to emit_store_flag. */
10482 if (operand_mode == BLKmode)
10485 /* We won't bother with store-flag operations involving function pointers
10486 when function pointers must be canonicalized before comparisons. */
10487 #ifdef HAVE_canonicalize_funcptr_for_compare
10488 if (HAVE_canonicalize_funcptr_for_compare
10489 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10490 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10492 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10493 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10494 == FUNCTION_TYPE))))
10501 /* Get the rtx comparison code to use. We know that EXP is a comparison
10502 operation of some type. Some comparisons against 1 and -1 can be
10503 converted to comparisons with zero. Do so here so that the tests
10504 below will be aware that we have a comparison with zero. These
10505 tests will not catch constants in the first operand, but constants
10506 are rarely passed as the first operand. */
10508 switch (TREE_CODE (exp))
/* x < 1 (unsigned: x <= 0; signed: x <= 0) becomes a zero compare.  */
10517 if (integer_onep (arg1))
10518 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10520 code = unsignedp ? LTU : LT;
/* Signed x <= -1 is x < 0.  */
10523 if (! unsignedp && integer_all_onesp (arg1))
10524 arg1 = integer_zero_node, code = LT;
10526 code = unsignedp ? LEU : LE;
/* Signed x > -1 is x >= 0.  */
10529 if (! unsignedp && integer_all_onesp (arg1))
10530 arg1 = integer_zero_node, code = GE;
10532 code = unsignedp ? GTU : GT;
/* x >= 1 is x > 0.  */
10535 if (integer_onep (arg1))
10536 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10538 code = unsignedp ? GEU : GE;
10541 case UNORDERED_EXPR:
10567 /* Put a constant second. */
10568 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10570 tem = arg0; arg0 = arg1; arg1 = tem;
10571 code = swap_condition (code);
10574 /* If this is an equality or inequality test of a single bit, we can
10575 do this by shifting the bit being tested to the low-order bit and
10576 masking the result with the constant 1. If the condition was EQ,
10577 we xor it with 1. This does not require an scc insn and is faster
10578 than an scc insn even if we have it. */
10580 if ((code == NE || code == EQ)
10581 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10582 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10584 tree inner = TREE_OPERAND (arg0, 0);
10585 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10588 /* If INNER is a right shift of a constant and it plus BITNUM does
10589 not overflow, adjust BITNUM and INNER. */
10591 if (TREE_CODE (inner) == RSHIFT_EXPR
10592 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10593 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10594 && bitnum < TYPE_PRECISION (type)
10595 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10596 bitnum - TYPE_PRECISION (type)))
10598 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10599 inner = TREE_OPERAND (inner, 0);
10602 /* If we are going to be able to omit the AND below, we must do our
10603 operations as unsigned. If we must use the AND, we have a choice.
10604 Normally unsigned is faster, but for some machines signed is. */
10605 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10606 #ifdef LOAD_EXTEND_OP
10607 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Only reuse SUBTARGET if it is a register of the right mode that
   evaluating INNER cannot clobber.  */
10613 if (! get_subtarget (subtarget)
10614 || GET_MODE (subtarget) != operand_mode
10615 || ! safe_from_p (subtarget, inner, 1))
10618 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0 ...  */
10621 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10622 size_int (bitnum), subtarget, ops_unsignedp);
10624 if (GET_MODE (op0) != mode)
10625 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* ... flip it when the sense of the test requires it ...  */
10627 if ((code == EQ && ! invert) || (code == NE && invert))
10628 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10629 ops_unsignedp, OPTAB_LIB_WIDEN);
10631 /* Put the AND last so it can combine with more things. */
10632 if (bitnum != TYPE_PRECISION (type) - 1)
10633 op0 = expand_and (op0, const1_rtx, subtarget);
10638 /* Now see if we are likely to be able to do this. Return if not. */
10639 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10642 icode = setcc_gen_code[(int) code];
10643 if (icode == CODE_FOR_nothing
10644 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10646 /* We can only do this if it is one of the special cases that
10647 can be handled without an scc insn. */
10648 if ((code == LT && integer_zerop (arg1))
10649 || (! only_cheap && code == GE && integer_zerop (arg1)))
/* EQ/NE against zero can also be simulated when the machine has an
   abs or ffs instruction in this mode.  */
10651 else if (BRANCH_COST >= 0
10652 && ! only_cheap && (code == NE || code == EQ)
10653 && TREE_CODE (type) != REAL_TYPE
10654 && ((abs_optab->handlers[(int) operand_mode].insn_code
10655 != CODE_FOR_nothing)
10656 || (ffs_optab->handlers[(int) operand_mode].insn_code
10657 != CODE_FOR_nothing)))
/* From here on we are committed: expand the operands.  Reuse the
   subtarget only when safe (same criteria as above).  */
10663 if (! get_subtarget (target)
10664 || GET_MODE (subtarget) != operand_mode
10665 || ! safe_from_p (subtarget, arg1, 1))
10668 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10669 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10672 target = gen_reg_rtx (mode);
10674 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10675 because, if the emit_store_flag does anything it will succeed and
10676 OP0 and OP1 will not be used subsequently. */
10678 result = emit_store_flag (target, code,
10679 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10680 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10681 operand_mode, unsignedp, 1);
/* A TRUTH_NOT_EXPR wrapper means the 0/1 result must be inverted.  */
10686 result = expand_binop (mode, xor_optab, result, const1_rtx,
10687 result, 0, OPTAB_LIB_WIDEN);
10691 /* If this failed, we have to do this with set/compare/jump/set code. */
/* TARGET must be a register not referenced by either operand, since
   it is written before the comparison is made.  */
10692 if (GET_CODE (target) != REG
10693 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10694 target = gen_reg_rtx (GET_MODE (target));
/* Preload the "condition holds" value, compare, and branch over the
   store of the opposite value.  */
10696 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10697 result = compare_from_rtx (op0, op1, code, unsignedp,
10698 operand_mode, NULL_RTX, 0);
/* compare_from_rtx may have folded the comparison to a constant.  */
10699 if (GET_CODE (result) == CONST_INT)
10700 return (((result == const0_rtx && ! invert)
10701 || (result != const0_rtx && invert))
10702 ? const0_rtx : const1_rtx);
10704 label = gen_label_rtx ();
10705 if (bcc_gen_fctn[(int) code] == 0)
10708 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10709 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10710 emit_label (label);
10716 /* Stubs in case we haven't got a casesi insn. */
10717 #ifndef HAVE_casesi
10718 # define HAVE_casesi 0
10719 # define gen_casesi(a, b, c, d, e) (0)
10720 # define CODE_FOR_casesi CODE_FOR_nothing
10723 /* If the machine does not have a case insn that compares the bounds,
10724 this means extra overhead for dispatch tables, which raises the
10725 threshold for using them. */
10726 #ifndef CASE_VALUES_THRESHOLD
10727 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10728 #endif /* CASE_VALUES_THRESHOLD */
/* Return the smallest number of case values for which a dispatch
   table is preferred over a chain of compares (target-overridable
   via CASE_VALUES_THRESHOLD).  */
10731 case_values_threshold ()
10733 return CASE_VALUES_THRESHOLD;
10736 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10737 0 otherwise (i.e. if there is no casesi instruction). */
10739 try_casesi (index_type, index_expr, minval, range,
10740 table_label, default_label)
10741 tree index_type, index_expr, minval, range;
10742 rtx table_label ATTRIBUTE_UNUSED;
/* The casesi pattern takes its index in SImode.  */
10745 enum machine_mode index_mode = SImode;
10746 int index_bits = GET_MODE_BITSIZE (index_mode);
10747 rtx op1, op2, index;
10748 enum machine_mode op_mode;
10753 /* Convert the index to SImode. */
/* Index wider than SImode: do the bounds check in the wide mode
   first, then truncate.  */
10754 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10756 enum machine_mode omode = TYPE_MODE (index_type);
10757 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10759 /* We must handle the endpoints in the original mode. */
/* Bias the index by MINVAL so the valid span starts at zero.  */
10760 index_expr = build (MINUS_EXPR, index_type,
10761 index_expr, minval);
10762 minval = integer_zero_node;
10763 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* Jump to DEFAULT_LABEL when RANGE < INDEX (unsigned), i.e. the
   index is out of the table's bounds.  */
10764 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10765 omode, 1, 0, default_label);
10766 /* Now we can safely truncate. */
10767 index = convert_to_mode (index_mode, index, 0);
/* Index narrower than SImode: widen it at the tree level so the
   casesi operand has the expected mode.  */
10771 if (TYPE_MODE (index_type) != index_mode)
10773 index_expr = convert (type_for_size (index_bits, 0),
10775 index_type = TREE_TYPE (index_expr);
10778 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10781 index = protect_from_queue (index, 0);
10782 do_pending_stack_adjust ();
/* For each casesi operand, force the value into a register of the
   operand's mode whenever it fails the pattern's predicate.  */
10784 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10785 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10787 index = copy_to_mode_reg (op_mode, index);
10789 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10791 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10792 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10793 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10794 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10796 op1 = copy_to_mode_reg (op_mode, op1);
10798 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10800 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10801 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10802 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10803 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10805 op2 = copy_to_mode_reg (op_mode, op2);
/* Emit the dispatch itself.  */
10807 emit_jump_insn (gen_casesi (index, op1, op2,
10808 table_label, default_label));
10812 /* Attempt to generate a tablejump instruction; same concept. */
10813 #ifndef HAVE_tablejump
10814 #define HAVE_tablejump 0
10815 #define gen_tablejump(x, y) (0)
10818 /* Subroutine of the next function.
10820 INDEX is the value being switched on, with the lowest value
10821 in the table already subtracted.
10822 MODE is its expected mode (needed if INDEX is constant).
10823 RANGE is the length of the jump table.
10824 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10826 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10827 index value is out of range. */
10830 do_tablejump (index, mode, range, table_label, default_label)
10831 rtx index, range, table_label, default_label;
10832 enum machine_mode mode;
10834 register rtx temp, vector;
10836 /* Do an unsigned comparison (in the proper mode) between the index
10837 expression and the value which represents the length of the range.
10838 Since we just finished subtracting the lower bound of the range
10839 from the index expression, this comparison allows us to simultaneously
10840 check that the original index expression value is both greater than
10841 or equal to the minimum value of the range and less than or equal to
10842 the maximum value of the range. */
10844 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10847 /* If index is in range, it must fit in Pmode.
10848 Convert to Pmode so we can index with it. */
10850 index = convert_to_mode (Pmode, index, 1);
10852 /* Don't let a MEM slip thru, because then INDEX that comes
10853 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10854 and break_out_memory_refs will go to work on it and mess it up. */
10855 #ifdef PIC_CASE_VECTOR_ADDRESS
10856 if (flag_pic && GET_CODE (index) != REG)
10857 index = copy_to_mode_reg (Pmode, index)
10860 /* If flag_force_addr were to affect this address
10861 it could interfere with the tricky assumptions made
10862 about addresses that contain label-refs,
10863 which may be valid only very near the tablejump itself. */
10864 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10865 GET_MODE_SIZE, because this indicates how large insns are. The other
10866 uses should all be Pmode, because they are addresses. This code
10867 could fail if addresses and insns are not the same size. */
/* Address of table entry = TABLE_LABEL + INDEX * entry_size.  */
10868 index = gen_rtx_PLUS (Pmode,
10869 gen_rtx_MULT (Pmode, index,
10870 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10871 gen_rtx_LABEL_REF (Pmode, table_label));
10872 #ifdef PIC_CASE_VECTOR_ADDRESS
10874 index = PIC_CASE_VECTOR_ADDRESS (index);
10877 index = memory_address_noforce (CASE_VECTOR_MODE, index);
/* Load the table entry; the table is constant, hence UNCHANGING.  */
10878 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10879 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10880 RTX_UNCHANGING_P (vector) = 1;
10881 convert_move (temp, vector, 0);
10883 emit_jump_insn (gen_tablejump (temp, table_label));
10885 /* If we are generating PIC code or if the table is PC-relative, the
10886 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10887 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
/* Attempt to expand a switch via do_tablejump; returns 0 immediately
   when the target has no tablejump pattern.  */
10892 try_tablejump (index_type, index_expr, minval, range,
10893 table_label, default_label)
10894 tree index_type, index_expr, minval, range;
10895 rtx table_label, default_label;
10899 if (! HAVE_tablejump)
/* Bias the index by MINVAL at the tree level so table slot 0
   corresponds to the lowest case value.  */
10902 index_expr = fold (build (MINUS_EXPR, index_type,
10903 convert (index_type, index_expr),
10904 convert (index_type, minval)));
10905 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10907 index = protect_from_queue (index, 0);
10908 do_pending_stack_adjust ();
/* RANGE is converted into the index's mode before the dispatch.  */
10910 do_tablejump (index, TYPE_MODE (index_type),
10911 convert_modes (TYPE_MODE (index_type),
10912 TYPE_MODE (TREE_TYPE (range)),
10913 expand_expr (range, NULL_RTX,
10915 TREE_UNSIGNED (TREE_TYPE (range))),
10916 table_label, default_label);