/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx get_push_address PARAMS ((int));

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
                                PARAMS ((unsigned HOST_WIDE_INT,
                                         unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
                                      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
                                       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       enum machine_mode,
                                       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, unsigned int, int,
                                             int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
                                       HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int,
                                unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
                                         rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
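
/* Illustrative sketch, not part of the original file: how the two knobs
   above combine.  A hypothetical caller asking about a 16-byte copy known
   to be 32-bit aligned would evaluate the same test MOVE_BY_PIECES_P
   performs; the helper name `would_move_by_pieces' is invented for this
   example.  */
#if 0
static int
would_move_by_pieces (size, align)
     unsigned HOST_WIDE_INT size;
     unsigned int align;
{
  /* Exactly the macro's test: compare the insn-count estimate against
     MOVE_RATIO.  */
  return move_by_pieces_ninsns (size, align) < (unsigned int) MOVE_RATIO;
}
#endif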
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
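
/* Illustrative sketch, not part of the original file: the intended calling
   pattern for protect_from_queue.  The rtx arguments are hypothetical; the
   point is that both operands are protected immediately before the insn is
   emitted, with no intervening emit_queue.  */
#if 0
static void
example_protected_move (dest, src)
     rtx dest, src;
{
  dest = protect_from_queue (dest, 1);	/* Writable destination: MODIFY = 1.  */
  src = protect_from_queue (src, 0);	/* Read-only source: MODIFY = 0.  */

  /* Use the protected values right away; holding them across a call to
     emit_queue could produce incorrect code.  */
  emit_move_insn (dest, src);
}
#endif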
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall, value, insns;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
    {
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, UNKNOWN);
          return;
        }
    }

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
      return;
    }
#endif
      libcall = extendsfdf2_libfunc;
      libcall = extendsfxf2_libfunc;
      libcall = extendsftf2_libfunc;
      libcall = truncdfsf2_libfunc;
      libcall = extenddfxf2_libfunc;
      libcall = extenddftf2_libfunc;
      libcall = truncxfsf2_libfunc;
      libcall = truncxfdf2_libfunc;
      libcall = trunctfsf2_libfunc;
      libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
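
/* Illustrative sketch, not part of the original file: zero-extending a
   QImode pseudo into an SImode pseudo with convert_move.  The registers
   are freshly generated for the example.  */
#if 0
static rtx
example_zero_extend_byte (byte_reg)
     rtx byte_reg;			/* Assumed to have QImode.  */
{
  rtx wide = gen_reg_rtx (SImode);

  /* UNSIGNEDP = 1 requests zero-extension rather than sign-extension.  */
  convert_move (wide, byte_reg, 1);
  return wide;
}
#endif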
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
1311 if (mode == oldmode)
1314 /* There is one case that we must handle specially: If we are converting
1315 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1316 we are to interpret the constant as unsigned, gen_lowpart will do
1317 the wrong if the constant appears negative. What we want to do is
1318 make the high-order word of the constant zero, not all ones. */
1320 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1321 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1322 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1324 HOST_WIDE_INT val = INTVAL (x);
1326 if (oldmode != VOIDmode
1327 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1329 int width = GET_MODE_BITSIZE (oldmode);
1331 /* We need to zero extend VAL. */
1332 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1335 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (trunc_int_for_mode (val, mode));
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
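
/* Illustrative sketch, not part of the original file: convert_modes on a
   CONST_INT.  Since a CONST_INT carries no mode, OLDMODE supplies it; here
   the constant is read as a signed QImode value and widened to SImode.  */
#if 0
static rtx
example_widen_constant ()
{
  /* (const_int -1) interpreted as signed QImode, widened to SImode;
     convert_modes returns a constant, so no conversion insns are
     emitted.  */
  return convert_modes (SImode, QImode, GEN_INT (-1), 0);
}
#endif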
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
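
/* Illustrative sketch, not part of the original file: a worked instance of
   the count above.  */
#if 0
static unsigned HOST_WIDE_INT
example_ninsns_check ()
{
  /* Assumes a target where MOVE_MAX >= 4 and 32-bit alignment suffices
     for SImode: two SImode moves cover bytes 0-7 and one HImode move
     covers bytes 8-9, so the estimate below would be 3.  */
  return move_by_pieces_ninsns (10, 32);
}
#endif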
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            {
              to1 = replace_equiv_address (data->to, data->to_addr);
              to1 = adjust_address (to1, mode, 0);
            }
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        {
          from1 = replace_equiv_address (data->from, data->from_addr);
          from1 = adjust_address (from1, mode, 0);
        }
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
         clobbered it.  Otherwise, a load from it may erroneously be hoisted
         from a loop.  */
      if (RTX_UNCHANGING_P (x))
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
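
/* Illustrative sketch, not part of the original file: copying an N-byte
   BLKmode object with emit_block_move.  Both operands must be BLKmode MEMs;
   the address arguments here are hypothetical pseudos.  */
#if 0
static void
example_block_copy (dst_addr, src_addr, nbytes)
     rtx dst_addr, src_addr;
     HOST_WIDE_INT nbytes;
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  /* emit_block_move picks move_by_pieces, a movstr pattern, or a
     memcpy/bcopy call on its own.  */
  emit_block_move (dst, src, GEN_INT (nbytes));
}
#endif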
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat, last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
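
/* Illustrative sketch, not part of the original file: loading a two-word
   memory value into consecutive hard registers.  EXAMPLE_REGNO stands in
   for a target-specific hard register number; it is hypothetical.  */
#if 0
static void
example_load_two_words (x)
     rtx x;				/* A MEM two words wide.  */
{
  /* Fills EXAMPLE_REGNO and EXAMPLE_REGNO + 1, one word each, using a
     load-multiple insn when the target provides one.  */
  move_block_to_reg (EXAMPLE_REGNO, x, 2, GET_MODE (x));
}
#endif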
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat, last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));
          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if (bytepos == 0
              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src),
                                           GET_MODE_SIZE (GET_MODE (src)), 0);
              emit_move_insn (mem, src);
              tmps[i] = change_address (mem, mode, XEXP (mem, 0));
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
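
/* Illustrative sketch, not part of the original file: building the kind of
   PARALLEL emit_group_load consumes.  Each element pairs a register with
   its byte offset within the source; the hard register numbers 3 and 4 are
   hypothetical.  */
#if 0
static void
example_group_load (src)
     rtx src;				/* An 8-byte BLKmode value.  */
{
  rtvec v = gen_rtvec (2,
                       gen_rtx_EXPR_LIST (VOIDmode,
                                          gen_rtx_REG (SImode, 3),
                                          GEN_INT (0)),
                       gen_rtx_EXPR_LIST (VOIDmode,
                                          gen_rtx_REG (SImode, 4),
                                          GEN_INT (4)));
  rtx dst = gen_rtx_PARALLEL (VOIDmode, v);

  /* SSIZE = 8 bytes, ALIGN = 32 bits, both assumed for the example.  */
  emit_group_load (dst, src, 8, 32);
}
#endif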
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          bitsize, BITS_PER_WORD),
                       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
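
/* Illustrative sketch, not part of the original file: the typical use of
   copy_blkmode_from_reg when expanding a call whose BLKmode return value
   arrives in registers.  Passing a null TGTBLK lets the routine create the
   stack temporary itself.  */
#if 0
static rtx
example_copy_returned_struct (srcreg, type)
     rtx srcreg;			/* The return-value register(s).  */
     tree type;				/* The struct's tree type.  */
{
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif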
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
2272 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2273 PARALLEL REGS. This is for calls that pass values in multiple
2274 non-contiguous locations. The Irix 6 ABI has examples of this. */
2277 use_group_regs (call_fusage, regs)
2283 for (i = 0; i < XVECLEN (regs, 0); i++)
2285 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2287 /* A NULL entry means the parameter goes both on the stack and in
2288 registers. This can also be a MEM for targets that pass values
2289 partially on the stack and partially in registers. */
2290 if (reg != 0 && GET_CODE (reg) == REG)
2291 use_reg (call_fusage, reg);
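/* For illustration, a hypothetical 16-byte value whose first eight bytes
   go on the stack and whose second eight go in (reg:DI 5) could be
   described by

     (parallel [(expr_list (nil) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   Only the second entry would produce a USE here. */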
2297 can_store_by_pieces (len, constfun, constfundata, align)
2298 unsigned HOST_WIDE_INT len;
2299 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2303 unsigned HOST_WIDE_INT max_size, l;
2304 HOST_WIDE_INT offset = 0;
2305 enum machine_mode mode, tmode;
2306 enum insn_code icode;
2310 if (! MOVE_BY_PIECES_P (len, align))
2313 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2314 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2315 align = MOVE_MAX * BITS_PER_UNIT;
2317 /* We would first store what we can in the largest integer mode, then go to
2318 successively smaller modes. */
2321 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2326 max_size = MOVE_MAX_PIECES + 1;
2327 while (max_size > 1)
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2334 if (mode == VOIDmode)
2337 icode = mov_optab->handlers[(int) mode].insn_code;
2338 if (icode != CODE_FOR_nothing
2339 && align >= GET_MODE_ALIGNMENT (mode))
2341 unsigned int size = GET_MODE_SIZE (mode);
2348 cst = (*constfun) (constfundata, offset, mode);
2349 if (!LEGITIMATE_CONSTANT_P (cst))
2359 max_size = GET_MODE_SIZE (mode);
2362 /* The code above should have handled everything. */
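/* A standalone sketch (not compiled here) of the widest-piece-first walk
   used above and in store_by_pieces_1 below, assuming power-of-two piece
   sizes up to 8 bytes: LEN = 11 decomposes into 8 + 2 + 1, i.e. three
   store insns. */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned len = 11, max_size = 8;

  while (max_size >= 1 && len > 0)
    {
      while (len >= max_size)
	{
	  printf ("store %u bytes\n", max_size);
	  len -= max_size;
	}
      max_size /= 2;
    }
  return 0;
}
#endif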
2370 /* Generate several move instructions to store LEN bytes generated by
2371 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2372 pointer which will be passed as argument in every CONSTFUN call.
2373 ALIGN is maximum alignment we can assume. */
2376 store_by_pieces (to, len, constfun, constfundata, align)
2378 unsigned HOST_WIDE_INT len;
2379 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2383 struct store_by_pieces data;
2385 if (! MOVE_BY_PIECES_P (len, align))
2387 to = protect_from_queue (to, 1);
2388 data.constfun = constfun;
2389 data.constfundata = constfundata;
2392 store_by_pieces_1 (&data, align);
2395 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2396 rtx with BLKmode). The caller must pass TO through protect_from_queue
2397 before calling. ALIGN is maximum alignment we can assume. */
2400 clear_by_pieces (to, len, align)
2402 unsigned HOST_WIDE_INT len;
2405 struct store_by_pieces data;
2407 data.constfun = clear_by_pieces_1;
2408 data.constfundata = NULL;
2411 store_by_pieces_1 (&data, align);
2414 /* Callback routine for clear_by_pieces.
2415 Return const0_rtx unconditionally. */
2418 clear_by_pieces_1 (data, offset, mode)
2419 PTR data ATTRIBUTE_UNUSED;
2420 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2421 enum machine_mode mode ATTRIBUTE_UNUSED;
2426 /* Subroutine of clear_by_pieces and store_by_pieces.
2427 Generate several move instructions to store LEN bytes of block TO. (A MEM
2428 rtx with BLKmode). The caller must pass TO through protect_from_queue
2429 before calling. ALIGN is maximum alignment we can assume. */
2432 store_by_pieces_1 (data, align)
2433 struct store_by_pieces *data;
2436 rtx to_addr = XEXP (data->to, 0);
2437 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2438 enum machine_mode mode = VOIDmode, tmode;
2439 enum insn_code icode;
2442 data->to_addr = to_addr;
2444 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2445 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2447 data->explicit_inc_to = 0;
2449 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2451 data->offset = data->len;
2453 /* If storing requires more than two move insns,
2454 copy addresses to registers (to make displacements shorter)
2455 and use post-increment if available. */
2456 if (!data->autinc_to
2457 && move_by_pieces_ninsns (data->len, align) > 2)
2459 /* Determine the main mode we'll be using. */
2460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2462 if (GET_MODE_SIZE (tmode) < max_size)
2465 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2467 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2468 data->autinc_to = 1;
2469 data->explicit_inc_to = -1;
2472 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2473 && ! data->autinc_to)
2475 data->to_addr = copy_addr_to_reg (to_addr);
2476 data->autinc_to = 1;
2477 data->explicit_inc_to = 1;
2480 if (!data->autinc_to && CONSTANT_P (to_addr))
2481 data->to_addr = copy_addr_to_reg (to_addr);
2484 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2485 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2486 align = MOVE_MAX * BITS_PER_UNIT;
2488 /* First store what we can in the largest integer mode, then go to
2489 successively smaller modes. */
2491 while (max_size > 1)
2493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2495 if (GET_MODE_SIZE (tmode) < max_size)
2498 if (mode == VOIDmode)
2501 icode = mov_optab->handlers[(int) mode].insn_code;
2502 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2503 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2505 max_size = GET_MODE_SIZE (mode);
2508 /* The code above should have handled everything. */
2513 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2514 with move instructions for mode MODE. GENFUN is the gen_... function
2515 to make a move insn for that mode. DATA has all the other info. */
2518 store_by_pieces_2 (genfun, mode, data)
2519 rtx (*genfun) PARAMS ((rtx, ...));
2520 enum machine_mode mode;
2521 struct store_by_pieces *data;
2523 unsigned int size = GET_MODE_SIZE (mode);
2526 while (data->len >= size)
2529 data->offset -= size;
2531 if (data->autinc_to)
2533 to1 = replace_equiv_address (data->to, data->to_addr);
2534 to1 = adjust_address (to1, mode, 0);
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
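/* For illustration, with EXPLICIT_INC_TO < 0 and 4-byte pieces the loop
   above emits, for each piece, a pair of insns of the shape

     (set (reg A) (plus (reg A) (const_int -4)))
     (set (mem:SI (reg A)) (const_int C))

   so the address register walks backwards through the block while each
   store itself uses a plain register address. */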
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2560 clear_storage (object, size)
2564 #ifdef TARGET_MEM_FUNCTIONS
2566 tree call_expr, arg_list;
2569 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2570 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2572 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2573 just move a zero. Otherwise, do this a piece at a time. */
2574 if (GET_MODE (object) != BLKmode
2575 && GET_CODE (size) == CONST_INT
2576 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2577 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2580 object = protect_from_queue (object, 1);
2581 size = protect_from_queue (size, 0);
2583 if (GET_CODE (size) == CONST_INT
2584 && MOVE_BY_PIECES_P (INTVAL (size), align))
2585 clear_by_pieces (object, INTVAL (size), align);
2588 /* Try the most limited insn first, because there's no point
2589 including more than one in the machine description unless
2590 the more limited one has some advantage. */
2592 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2593 enum machine_mode mode;
2595 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2596 mode = GET_MODE_WIDER_MODE (mode))
2598 enum insn_code code = clrstr_optab[(int) mode];
2599 insn_operand_predicate_fn pred;
2601 if (code != CODE_FOR_nothing
2602 /* We don't need MODE to be narrower than
2603 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2604 the mode mask, as it is returned by the macro, it will
2605 definitely be less than the actual mode mask. */
2606 && ((GET_CODE (size) == CONST_INT
2607 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2608 <= (GET_MODE_MASK (mode) >> 1)))
2609 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2610 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2611 || (*pred) (object, BLKmode))
2612 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2613 || (*pred) (opalign, VOIDmode)))
2616 rtx last = get_last_insn ();
2619 op1 = convert_to_mode (mode, size, 1);
2620 pred = insn_data[(int) code].operand[1].predicate;
2621 if (pred != 0 && ! (*pred) (op1, mode))
2622 op1 = copy_to_mode_reg (mode, op1);
2624 pat = GEN_FCN ((int) code) (object, op1, opalign);
2631 delete_insns_since (last);
2635 /* OBJECT or SIZE may have been passed through protect_from_queue.
2637 It is unsafe to save the value generated by protect_from_queue
2638 and reuse it later. Consider what happens if emit_queue is
2639 called before the return value from protect_from_queue is used.
2641 Expansion of the CALL_EXPR below will call emit_queue before
2642 we are finished emitting RTL for argument setup. So if we are
2643 not careful we could get the wrong value for an argument.
2645 To avoid this problem we go ahead and emit code to copy OBJECT
2646 and SIZE into new pseudos. We can then place those new pseudos
2647 into an RTL_EXPR and use them later, even after a call to
2650 Note this is not strictly needed for library calls since they
2651 do not call emit_queue before loading their arguments. However,
2652 we may need to have library calls call emit_queue in the future
2653 since failing to do so could cause problems for targets which
2654 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2655 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2657 #ifdef TARGET_MEM_FUNCTIONS
2658 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2660 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2661 TREE_UNSIGNED (integer_type_node));
2662 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2665 #ifdef TARGET_MEM_FUNCTIONS
2666 /* It is incorrect to use the libcall calling conventions to call
2667 memset in this context.
2669 This could be a user call to memset and the user may wish to
2670 examine the return value from memset.
2672 For targets where libcalls and normal calls have different
2673 conventions for returning pointers, we could end up generating
2676 So instead of using a libcall sequence we build up a suitable
2677 CALL_EXPR and expand the call in the normal fashion. */
2678 if (fn == NULL_TREE)
2682 /* This was copied from except.c; I don't know whether all of it
2683 is necessary in this context. */
2684 fn = get_identifier ("memset");
2685 fntype = build_pointer_type (void_type_node);
2686 fntype = build_function_type (fntype, NULL_TREE);
2687 fn = build_decl (FUNCTION_DECL, fn, fntype);
2688 ggc_add_tree_root (&fn, 1);
2689 DECL_EXTERNAL (fn) = 1;
2690 TREE_PUBLIC (fn) = 1;
2691 DECL_ARTIFICIAL (fn) = 1;
2692 TREE_NOTHROW (fn) = 1;
2693 make_decl_rtl (fn, NULL);
2694 assemble_external (fn);
2697 /* We need to make an argument list for the function call.
2699 memset has three arguments: the first is a void * address, the
2700 second an integer with the initialization value, and the last a
2701 size_t count of bytes to set. */
2703 = build_tree_list (NULL_TREE,
2704 make_tree (build_pointer_type (void_type_node),
2706 TREE_CHAIN (arg_list)
2707 = build_tree_list (NULL_TREE,
2708 make_tree (integer_type_node, const0_rtx));
2709 TREE_CHAIN (TREE_CHAIN (arg_list))
2710 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2711 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2713 /* Now we have to build up the CALL_EXPR itself. */
2714 call_expr = build1 (ADDR_EXPR,
2715 build_pointer_type (TREE_TYPE (fn)), fn);
2716 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2717 call_expr, arg_list, NULL_TREE);
2718 TREE_SIDE_EFFECTS (call_expr) = 1;
2720 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2722 emit_library_call (bzero_libfunc, LCT_NORMAL,
2723 VOIDmode, 2, object, Pmode, size,
2724 TYPE_MODE (integer_type_node));
2727 /* If we are initializing a readonly value, show the above call
2728 clobbered it. Otherwise, a load from it may erroneously be
2729 hoisted from a loop. */
2730 if (RTX_UNCHANGING_P (object))
2731 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
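/* The fallback path above is, at the source level, equivalent to a plain
   memset call; a minimal sketch (assuming a TARGET_MEM_FUNCTIONS
   configuration): */
#if 0
#include <string.h>

struct S { char buf[32]; };

void
clear (struct S *p)
{
  memset (p, 0, sizeof *p);	/* what the built CALL_EXPR expands to */
}
#endif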
2738 /* Generate code to copy Y into X.
2739 Both Y and X must have the same mode, except that
2740 Y can be a constant with VOIDmode.
2741 This mode cannot be BLKmode; use emit_block_move for that.
2743 Return the last instruction emitted. */
2746 emit_move_insn (x, y)
2749 enum machine_mode mode = GET_MODE (x);
2750 rtx y_cst = NULL_RTX;
2753 x = protect_from_queue (x, 1);
2754 y = protect_from_queue (y, 0);
2756 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2759 /* Never force constant_p_rtx to memory. */
2760 if (GET_CODE (y) == CONSTANT_P_RTX)
2762 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2765 y = force_const_mem (mode, y);
2768 /* If X or Y are memory references, verify that their addresses are valid
2770 if (GET_CODE (x) == MEM
2771 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2772 && ! push_operand (x, GET_MODE (x)))
2774 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2775 x = validize_mem (x);
2777 if (GET_CODE (y) == MEM
2778 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2780 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2781 y = validize_mem (y);
2783 if (mode == BLKmode)
2786 last_insn = emit_move_insn_1 (x, y);
2788 if (y_cst && GET_CODE (x) == REG)
2789 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2794 /* Low level part of emit_move_insn.
2795 Called just like emit_move_insn, but assumes X and Y
2796 are basically valid. */
2799 emit_move_insn_1 (x, y)
2802 enum machine_mode mode = GET_MODE (x);
2803 enum machine_mode submode;
2804 enum mode_class class = GET_MODE_CLASS (mode);
2807 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2810 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2812 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2814 /* Expand complex moves by moving real part and imag part, if possible. */
2815 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2816 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2818 (class == MODE_COMPLEX_INT
2819 ? MODE_INT : MODE_FLOAT),
2821 && (mov_optab->handlers[(int) submode].insn_code
2822 != CODE_FOR_nothing))
2824 /* Don't split destination if it is a stack push. */
2825 int stack = push_operand (x, GET_MODE (x));
2827 #ifdef PUSH_ROUNDING
2828 /* In case we output to the stack, but the size is smaller than what
2829 the machine can push exactly, we need to use move instructions. */
2831 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2834 int offset1, offset2;
2836 /* Do not use anti_adjust_stack, since we don't want to update
2837 stack_pointer_delta. */
2838 temp = expand_binop (Pmode,
2839 #ifdef STACK_GROWS_DOWNWARD
2846 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2850 if (temp != stack_pointer_rtx)
2851 emit_move_insn (stack_pointer_rtx, temp);
2852 #ifdef STACK_GROWS_DOWNWARD
2854 offset2 = GET_MODE_SIZE (submode);
2856 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2857 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2858 + GET_MODE_SIZE (submode));
2860 emit_move_insn (change_address (x, submode,
2861 gen_rtx_PLUS (Pmode,
2863 GEN_INT (offset1))),
2864 gen_realpart (submode, y));
2865 emit_move_insn (change_address (x, submode,
2866 gen_rtx_PLUS (Pmode,
2868 GEN_INT (offset2))),
2869 gen_imagpart (submode, y));
2873 /* If this is a stack push, push the highpart first, so it
2874 will be in the argument order.
2876 In that case, change_address is used only to convert
2877 the mode, not to change the address. */
2880 /* Note that the real part always precedes the imag part in memory
2881 regardless of machine's endianness. */
2882 #ifdef STACK_GROWS_DOWNWARD
2883 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884 (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_imagpart (submode, y)));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_realpart (submode, y)));
2890 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2891 (gen_rtx_MEM (submode, XEXP (x, 0)),
2892 gen_realpart (submode, y)));
2893 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2894 (gen_rtx_MEM (submode, XEXP (x, 0)),
2895 gen_imagpart (submode, y)));
2900 rtx realpart_x, realpart_y;
2901 rtx imagpart_x, imagpart_y;
2903 /* If this is a complex value with each part being smaller than a
2904 word, the usual calling sequence will likely pack the pieces into
2905 a single register. Unfortunately, SUBREG of hard registers only
2906 deals in terms of words, so we have a problem converting input
2907 arguments to the CONCAT of two registers that is used elsewhere
2908 for complex values. If this is before reload, we can copy it into
2909 memory and reload. FIXME, we should see about using extract and
2910 insert on integer registers, but complex short and complex char
2911 variables should be rarely used. */
2912 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2913 && (reload_in_progress | reload_completed) == 0)
2915 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2916 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2918 if (packed_dest_p || packed_src_p)
2920 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2921 ? MODE_FLOAT : MODE_INT);
2923 enum machine_mode reg_mode
2924 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2926 if (reg_mode != BLKmode)
2928 rtx mem = assign_stack_temp (reg_mode,
2929 GET_MODE_SIZE (mode), 0);
2930 rtx cmem = adjust_address (mem, mode, 0);
2933 = N_("function using short complex types cannot be inline");
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2938 emit_move_insn_1 (cmem, y);
2939 return emit_move_insn_1 (sreg, mem);
2943 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2944 emit_move_insn_1 (mem, sreg);
2945 return emit_move_insn_1 (x, cmem);
2951 realpart_x = gen_realpart (submode, x);
2952 realpart_y = gen_realpart (submode, y);
2953 imagpart_x = gen_imagpart (submode, x);
2954 imagpart_y = gen_imagpart (submode, y);
2956 /* Show the output dies here. This is necessary for SUBREGs
2957 of pseudos since we cannot track their lifetimes correctly;
2958 hard regs shouldn't appear here except as return values.
2959 We never want to emit such a clobber after reload. */
2961 && ! (reload_in_progress || reload_completed)
2962 && (GET_CODE (realpart_x) == SUBREG
2963 || GET_CODE (imagpart_x) == SUBREG))
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (realpart_x, realpart_y));
2970 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2971 (imagpart_x, imagpart_y));
2974 return get_last_insn ();
2977 /* This will handle any multi-word mode that lacks a move_insn pattern.
2978 However, you will get better code if you define such patterns,
2979 even if they must turn into multiple assembler instructions. */
2980 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2986 #ifdef PUSH_ROUNDING
2988 /* If X is a push on the stack, do the push now and replace
2989 X with a reference to the stack pointer. */
2990 if (push_operand (x, GET_MODE (x)))
2995 /* Do not use anti_adjust_stack, since we don't want to update
2996 stack_pointer_delta. */
2997 temp = expand_binop (Pmode,
2998 #ifdef STACK_GROWS_DOWNWARD
3005 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
3009 if (temp != stack_pointer_rtx)
3010 emit_move_insn (stack_pointer_rtx, temp);
3012 code = GET_CODE (XEXP (x, 0));
3013 /* Just hope that small offsets off SP are OK. */
3014 if (code == POST_INC)
3015 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3016 GEN_INT (-(HOST_WIDE_INT)
3017 GET_MODE_SIZE (GET_MODE (x))));
3018 else if (code == POST_DEC)
3019 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3020 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3022 temp = stack_pointer_rtx;
3024 x = change_address (x, VOIDmode, temp);
3028 /* If we are in reload, see if either operand is a MEM whose address
3029 is scheduled for replacement. */
3030 if (reload_in_progress && GET_CODE (x) == MEM
3031 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3032 x = replace_equiv_address_nv (x, inner);
3033 if (reload_in_progress && GET_CODE (y) == MEM
3034 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3035 y = replace_equiv_address_nv (y, inner);
3041 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3044 rtx xpart = operand_subword (x, i, 1, mode);
3045 rtx ypart = operand_subword (y, i, 1, mode);
3047 /* If we can't get a part of Y, put Y into memory if it is a
3048 constant. Otherwise, force it into a register. If we still
3049 can't get a part of Y, abort. */
3050 if (ypart == 0 && CONSTANT_P (y))
3052 y = force_const_mem (mode, y);
3053 ypart = operand_subword (y, i, 1, mode);
3055 else if (ypart == 0)
3056 ypart = operand_subword_force (y, i, mode);
3058 if (xpart == 0 || ypart == 0)
3061 need_clobber |= (GET_CODE (xpart) == SUBREG);
3063 last_insn = emit_move_insn (xpart, ypart);
3066 seq = gen_sequence ();
3069 /* Show the output dies here. This is necessary for SUBREGs
3070 of pseudos since we cannot track their lifetimes correctly;
3071 hard regs shouldn't appear here except as return values.
3072 We never want to emit such a clobber after reload. */
3074 && ! (reload_in_progress || reload_completed)
3075 && need_clobber != 0)
3077 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
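/* A standalone sketch of what the word-at-a-time fallback amounts to for
   a double-word integer on a hypothetical 32-bit target: two single-word
   moves, one per subword. */
#if 0
#include <stdint.h>

void
move_di (uint32_t dst[2], const uint32_t src[2])
{
  dst[0] = src[0];	/* operand_subword 0 */
  dst[1] = src[1];	/* operand_subword 1 */
}
#endif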
3088 /* Pushing data onto the stack. */
3090 /* Push a block of length SIZE (perhaps variable)
3091 and return an rtx to address the beginning of the block.
3092 Note that it is not possible for the value returned to be a QUEUED.
3093 The value may be virtual_outgoing_args_rtx.
3095 EXTRA is the number of bytes of padding to push in addition to SIZE.
3096 BELOW nonzero means this padding comes at low addresses;
3097 otherwise, the padding comes at high addresses. */
3100 push_block (size, extra, below)
3106 size = convert_modes (Pmode, ptr_mode, size, 1);
3107 if (CONSTANT_P (size))
3108 anti_adjust_stack (plus_constant (size, extra));
3109 else if (GET_CODE (size) == REG && extra == 0)
3110 anti_adjust_stack (size);
3113 temp = copy_to_mode_reg (Pmode, size);
3115 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3116 temp, 0, OPTAB_LIB_WIDEN);
3117 anti_adjust_stack (temp);
3120 #ifndef STACK_GROWS_DOWNWARD
3126 temp = virtual_outgoing_args_rtx;
3127 if (extra != 0 && below)
3128 temp = plus_constant (temp, extra);
3132 if (GET_CODE (size) == CONST_INT)
3133 temp = plus_constant (virtual_outgoing_args_rtx,
3134 -INTVAL (size) - (below ? 0 : extra));
3135 else if (extra != 0 && !below)
3136 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3137 negate_rtx (Pmode, plus_constant (size, extra)));
3139 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3140 negate_rtx (Pmode, size));
3143 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
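/* A sketch of the address arithmetic above for a downward-growing stack
   with constant SIZE (all numbers hypothetical): with EXTRA padding at
   high addresses (BELOW == 0), the block's lowest byte ends up at
   BASE - SIZE - EXTRA. */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long base = 0x1000;	/* stand-in for virtual_outgoing_args_rtx */
  unsigned size = 24, extra = 8;
  unsigned long block = base - size - extra;

  printf ("block at %#lx\n", block);	/* 0x1000 - 32 = 0xfe0 */
  return 0;
}
#endif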
3147 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3148 block of SIZE bytes. */
3151 get_push_address (size)
3156 if (STACK_PUSH_CODE == POST_DEC)
3157 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3158 else if (STACK_PUSH_CODE == POST_INC)
3159 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3161 temp = stack_pointer_rtx;
3163 return copy_to_reg (temp);
3166 #ifdef PUSH_ROUNDING
3168 /* Emit single push insn. */
3171 emit_single_push_insn (mode, x, type)
3173 enum machine_mode mode;
3177 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179 enum insn_code icode;
3180 insn_operand_predicate_fn pred;
3182 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3183 /* If there is a push pattern, use it. Otherwise try the old way of
3184 throwing a MEM representing the push operation to the move expander. */
3185 icode = push_optab->handlers[(int) mode].insn_code;
3186 if (icode != CODE_FOR_nothing)
3188 if (((pred = insn_data[(int) icode].operand[0].predicate)
3189 && !((*pred) (x, mode))))
3190 x = force_reg (mode, x);
3191 emit_insn (GEN_FCN (icode) (x));
3194 if (GET_MODE_SIZE (mode) == rounded_size)
3195 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3198 #ifdef STACK_GROWS_DOWNWARD
3199 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3200 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3202 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3203 GEN_INT (rounded_size));
3205 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3208 dest = gen_rtx_MEM (mode, dest_addr);
3212 set_mem_attributes (dest, type, 1);
3213 /* Function incoming arguments may overlap with sibling call
3214 outgoing arguments and we cannot allow reordering of reads
3215 from function arguments with stores to outgoing arguments
3216 of sibling calls. */
3217 set_mem_alias_set (dest, 0);
3219 emit_move_insn (dest, x);
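/* For illustration, a hypothetical PUSH_ROUNDING that rounds every push
   up to a 4-byte multiple; with such a definition a 2-byte (HImode) push
   has rounded_size == 4 != 2, so the PRE_MODIFY form above is used
   instead of plain STACK_PUSH_CODE. */
#if 0
static unsigned
push_rounding (unsigned nbytes)
{
  return (nbytes + 3) & ~3u;	/* round up to a multiple of 4 */
}
#endif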
3223 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3225 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3227 SIZE is an rtx for the size of data to be copied (in bytes),
3228 needed only if X is BLKmode.
3230 ALIGN (in bits) is maximum alignment we can assume.
3232 If PARTIAL and REG are both nonzero, then copy that many of the first
3233 words of X into registers starting with REG, and push the rest of X.
3234 The amount of space pushed is decreased by PARTIAL words,
3235 rounded *down* to a multiple of PARM_BOUNDARY.
3236 REG must be a hard register in this case.
3237 If REG is zero but PARTIAL is not, take all other actions for an
3238 argument partially in registers, but do not actually load any registers.
3241 EXTRA is the amount in bytes of extra space to leave next to this arg.
3242 This is ignored if an argument block has already been allocated.
3244 On a machine that lacks real push insns, ARGS_ADDR is the address of
3245 the bottom of the argument block for this call. We use indexing off there
3246 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3247 argument block has not been preallocated.
3249 ARGS_SO_FAR is the size of args previously pushed for this call.
3251 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3252 for arguments passed in registers. If nonzero, it will be the number
3253 of bytes required. */
3256 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3257 args_addr, args_so_far, reg_parm_stack_space,
3260 enum machine_mode mode;
3269 int reg_parm_stack_space;
3273 enum direction stack_direction
3274 #ifdef STACK_GROWS_DOWNWARD
3280 /* Decide where to pad the argument: `downward' for below,
3281 `upward' for above, or `none' for don't pad it.
3282 Default is below for small data on big-endian machines; else above. */
3283 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3285 /* Invert direction if stack is post-decrement. */
3287 if (STACK_PUSH_CODE == POST_DEC)
3288 if (where_pad != none)
3289 where_pad = (where_pad == downward ? upward : downward);
3291 xinner = x = protect_from_queue (x, 0);
3293 if (mode == BLKmode)
3295 /* Copy a block into the stack, entirely or partially. */
3298 int used = partial * UNITS_PER_WORD;
3299 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3307 /* USED is now the # of bytes we need not copy to the stack
3308 because registers will take care of them. */
3311 xinner = adjust_address (xinner, BLKmode, used);
3313 /* If the partial register-part of the arg counts in its stack size,
3314 skip the part of stack space corresponding to the registers.
3315 Otherwise, start copying to the beginning of the stack space,
3316 by setting SKIP to 0. */
3317 skip = (reg_parm_stack_space == 0) ? 0 : used;
3319 #ifdef PUSH_ROUNDING
3320 /* Do it with several push insns if that doesn't take lots of insns
3321 and if there is no difficulty with push insns that skip bytes
3322 on the stack for alignment purposes. */
3325 && GET_CODE (size) == CONST_INT
3327 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3328 /* Here we avoid the case of a structure whose weak alignment
3329 forces many pushes of a small amount of data,
3330 and such small pushes do rounding that causes trouble. */
3331 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3332 || align >= BIGGEST_ALIGNMENT
3333 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3334 == (align / BITS_PER_UNIT)))
3335 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3337 /* Push padding now if padding above and stack grows down,
3338 or if padding below and stack grows up.
3339 But if space already allocated, this has already been done. */
3340 if (extra && args_addr == 0
3341 && where_pad != none && where_pad != stack_direction)
3342 anti_adjust_stack (GEN_INT (extra));
3344 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3346 if (current_function_check_memory_usage && ! in_check_memory_usage)
3350 in_check_memory_usage = 1;
3351 temp = get_push_address (INTVAL (size) - used);
3352 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3353 emit_library_call (chkr_copy_bitmap_libfunc,
3354 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3355 Pmode, XEXP (xinner, 0), Pmode,
3356 GEN_INT (INTVAL (size) - used),
3357 TYPE_MODE (sizetype));
3359 emit_library_call (chkr_set_right_libfunc,
3360 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3361 Pmode, GEN_INT (INTVAL (size) - used),
3362 TYPE_MODE (sizetype),
3363 GEN_INT (MEMORY_USE_RW),
3364 TYPE_MODE (integer_type_node));
3365 in_check_memory_usage = 0;
3369 #endif /* PUSH_ROUNDING */
3373 /* Otherwise make space on the stack and copy the data
3374 to the address of that space. */
3376 /* Deduct words put into registers from the size we must copy. */
3379 if (GET_CODE (size) == CONST_INT)
3380 size = GEN_INT (INTVAL (size) - used);
3382 size = expand_binop (GET_MODE (size), sub_optab, size,
3383 GEN_INT (used), NULL_RTX, 0,
3387 /* Get the address of the stack space.
3388 In this case, we do not deal with EXTRA separately.
3389 A single stack adjust will do. */
3392 temp = push_block (size, extra, where_pad == downward);
3395 else if (GET_CODE (args_so_far) == CONST_INT)
3396 temp = memory_address (BLKmode,
3397 plus_constant (args_addr,
3398 skip + INTVAL (args_so_far)));
3400 temp = memory_address (BLKmode,
3401 plus_constant (gen_rtx_PLUS (Pmode,
3405 if (current_function_check_memory_usage && ! in_check_memory_usage)
3407 in_check_memory_usage = 1;
3408 target = copy_to_reg (temp);
3409 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3410 emit_library_call (chkr_copy_bitmap_libfunc,
3411 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3413 XEXP (xinner, 0), Pmode,
3414 size, TYPE_MODE (sizetype));
3416 emit_library_call (chkr_set_right_libfunc,
3417 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3419 size, TYPE_MODE (sizetype),
3420 GEN_INT (MEMORY_USE_RW),
3421 TYPE_MODE (integer_type_node));
3422 in_check_memory_usage = 0;
3425 target = gen_rtx_MEM (BLKmode, temp);
3429 set_mem_attributes (target, type, 1);
3430 /* Function incoming arguments may overlap with sibling call
3431 outgoing arguments and we cannot allow reordering of reads
3432 from function arguments with stores to outgoing arguments
3433 of sibling calls. */
3434 set_mem_alias_set (target, 0);
3437 set_mem_align (target, align);
3439 /* TEMP is the address of the block. Copy the data there. */
3440 if (GET_CODE (size) == CONST_INT
3441 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3443 move_by_pieces (target, xinner, INTVAL (size), align);
3448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3449 enum machine_mode mode;
3451 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3453 mode = GET_MODE_WIDER_MODE (mode))
3455 enum insn_code code = movstr_optab[(int) mode];
3456 insn_operand_predicate_fn pred;
3458 if (code != CODE_FOR_nothing
3459 && ((GET_CODE (size) == CONST_INT
3460 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3461 <= (GET_MODE_MASK (mode) >> 1)))
3462 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3463 && (!(pred = insn_data[(int) code].operand[0].predicate)
3464 || ((*pred) (target, BLKmode)))
3465 && (!(pred = insn_data[(int) code].operand[1].predicate)
3466 || ((*pred) (xinner, BLKmode)))
3467 && (!(pred = insn_data[(int) code].operand[3].predicate)
3468 || ((*pred) (opalign, VOIDmode))))
3470 rtx op2 = convert_to_mode (mode, size, 1);
3471 rtx last = get_last_insn ();
3474 pred = insn_data[(int) code].operand[2].predicate;
3475 if (pred != 0 && ! (*pred) (op2, mode))
3476 op2 = copy_to_mode_reg (mode, op2);
3478 pat = GEN_FCN ((int) code) (target, xinner,
3486 delete_insns_since (last);
3491 if (!ACCUMULATE_OUTGOING_ARGS)
3493 /* If the source is referenced relative to the stack pointer,
3494 copy it to another register to stabilize it. We do not need
3495 to do this if we know that we won't be changing sp. */
3497 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3498 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3499 temp = copy_to_reg (temp);
3502 /* Make inhibit_defer_pop nonzero around the library call
3503 to force it to pop the bcopy-arguments right away. */
3505 #ifdef TARGET_MEM_FUNCTIONS
3506 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3507 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3508 convert_to_mode (TYPE_MODE (sizetype),
3509 size, TREE_UNSIGNED (sizetype)),
3510 TYPE_MODE (sizetype));
3512 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3513 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3514 convert_to_mode (TYPE_MODE (integer_type_node),
3516 TREE_UNSIGNED (integer_type_node)),
3517 TYPE_MODE (integer_type_node));
3522 else if (partial > 0)
3524 /* Scalar partly in registers. */
3526 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3529 /* # words of start of argument
3530 that we must make space for but need not store. */
3531 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3532 int args_offset = INTVAL (args_so_far);
3535 /* Push padding now if padding above and stack grows down,
3536 or if padding below and stack grows up.
3537 But if space already allocated, this has already been done. */
3538 if (extra && args_addr == 0
3539 && where_pad != none && where_pad != stack_direction)
3540 anti_adjust_stack (GEN_INT (extra));
3542 /* If we make space by pushing it, we might as well push
3543 the real data. Otherwise, we can leave OFFSET nonzero
3544 and leave the space uninitialized. */
3548 /* Now NOT_STACK gets the number of words that we don't need to
3549 allocate on the stack. */
3550 not_stack = partial - offset;
3552 /* If the partial register-part of the arg counts in its stack size,
3553 skip the part of stack space corresponding to the registers.
3554 Otherwise, start copying to the beginning of the stack space,
3555 by setting SKIP to 0. */
3556 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3558 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3559 x = validize_mem (force_const_mem (mode, x));
3561 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3562 SUBREGs of such registers are not allowed. */
3563 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3564 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3565 x = copy_to_reg (x);
3567 /* Loop over all the words allocated on the stack for this arg. */
3568 /* We can do it by words, because any scalar bigger than a word
3569 has a size a multiple of a word. */
3570 #ifndef PUSH_ARGS_REVERSED
3571 for (i = not_stack; i < size; i++)
3573 for (i = size - 1; i >= not_stack; i--)
3575 if (i >= not_stack + offset)
3576 emit_push_insn (operand_subword_force (x, i, mode),
3577 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3579 GEN_INT (args_offset + ((i - not_stack + skip)
3581 reg_parm_stack_space, alignment_pad);
3586 rtx target = NULL_RTX;
3589 /* Push padding now if padding above and stack grows down,
3590 or if padding below and stack grows up.
3591 But if space already allocated, this has already been done. */
3592 if (extra && args_addr == 0
3593 && where_pad != none && where_pad != stack_direction)
3594 anti_adjust_stack (GEN_INT (extra));
3596 #ifdef PUSH_ROUNDING
3597 if (args_addr == 0 && PUSH_ARGS)
3598 emit_single_push_insn (mode, x, type);
3602 if (GET_CODE (args_so_far) == CONST_INT)
3604 = memory_address (mode,
3605 plus_constant (args_addr,
3606 INTVAL (args_so_far)));
3608 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3611 dest = gen_rtx_MEM (mode, addr);
3614 set_mem_attributes (dest, type, 1);
3615 /* Function incoming arguments may overlap with sibling call
3616 outgoing arguments and we cannot allow reordering of reads
3617 from function arguments with stores to outgoing arguments
3618 of sibling calls. */
3619 set_mem_alias_set (dest, 0);
3622 emit_move_insn (dest, x);
3626 if (current_function_check_memory_usage && ! in_check_memory_usage)
3628 in_check_memory_usage = 1;
3630 target = get_push_address (GET_MODE_SIZE (mode));
3632 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3633 emit_library_call (chkr_copy_bitmap_libfunc,
3634 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3635 Pmode, XEXP (x, 0), Pmode,
3636 GEN_INT (GET_MODE_SIZE (mode)),
3637 TYPE_MODE (sizetype));
3639 emit_library_call (chkr_set_right_libfunc,
3640 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3641 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3642 TYPE_MODE (sizetype),
3643 GEN_INT (MEMORY_USE_RW),
3644 TYPE_MODE (integer_type_node));
3645 in_check_memory_usage = 0;
3650 /* If part should go in registers, copy that part
3651 into the appropriate registers. Do this now, at the end,
3652 since mem-to-mem copies above may do function calls. */
3653 if (partial > 0 && reg != 0)
3655 /* Handle calls that pass values in multiple non-contiguous locations.
3656 The Irix 6 ABI has examples of this. */
3657 if (GET_CODE (reg) == PARALLEL)
3658 emit_group_load (reg, x, -1, align); /* ??? size? */
3660 move_block_to_reg (REGNO (reg), x, partial, mode);
3663 if (extra && args_addr == 0 && where_pad == stack_direction)
3664 anti_adjust_stack (GEN_INT (extra));
3666 if (alignment_pad && args_addr == 0)
3667 anti_adjust_stack (alignment_pad);
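/* For illustration: for a three-word argument with PARTIAL == 1 and REG a
   hard register, the loops above push only words 1 and 2; word 0 is
   loaded into REG by move_block_to_reg at the very end, after any
   mem-to-mem copies that might themselves make function calls. */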
3670 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3678 /* Only registers can be subtargets. */
3679 || GET_CODE (x) != REG
3680 /* If the register is readonly, it can't be set more than once. */
3681 || RTX_UNCHANGING_P (x)
3682 /* Don't use hard regs to avoid extending their life. */
3683 || REGNO (x) < FIRST_PSEUDO_REGISTER
3684 /* Avoid subtargets inside loops,
3685 since they hide some invariant expressions. */
3686 || preserve_subexpressions_p ())
3690 /* Expand an assignment that stores the value of FROM into TO.
3691 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3692 (This may contain a QUEUED rtx;
3693 if the value is constant, this rtx is a constant.)
3694 Otherwise, the returned value is NULL_RTX.
3696 SUGGEST_REG is no longer actually used.
3697 It used to mean, copy the value through a register
3698 and return that register, if that is possible.
3699 We now use WANT_VALUE to decide whether to do this. */
3702 expand_assignment (to, from, want_value, suggest_reg)
3705 int suggest_reg ATTRIBUTE_UNUSED;
3710 /* Don't crash if the lhs of the assignment was erroneous. */
3712 if (TREE_CODE (to) == ERROR_MARK)
3714 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3715 return want_value ? result : NULL_RTX;
3718 /* Assignment of a structure component needs special treatment
3719 if the structure component's rtx is not simply a MEM.
3720 Assignment of an array element at a constant index, and assignment of
3721 an array element in an unaligned packed structure field, has the same problem.
3724 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3725 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3727 enum machine_mode mode1;
3728 HOST_WIDE_INT bitsize, bitpos;
3733 unsigned int alignment;
3736 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3737 &unsignedp, &volatilep, &alignment);
3739 /* If we are going to use store_bit_field and extract_bit_field,
3740 make sure to_rtx will be safe for multiple use. */
3742 if (mode1 == VOIDmode && want_value)
3743 tem = stabilize_reference (tem);
3745 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3748 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3750 if (GET_CODE (to_rtx) != MEM)
3753 if (GET_MODE (offset_rtx) != ptr_mode)
3754 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3756 #ifdef POINTERS_EXTEND_UNSIGNED
3757 if (GET_MODE (offset_rtx) != Pmode)
3758 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3761 /* A constant address in TO_RTX can have VOIDmode; we must not try
3762 to call force_reg for that case, so avoid it. */
3763 if (GET_CODE (to_rtx) == MEM
3764 && GET_MODE (to_rtx) == BLKmode
3765 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3767 && (bitpos % bitsize) == 0
3768 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3769 && alignment == GET_MODE_ALIGNMENT (mode1))
3772 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3774 if (GET_CODE (XEXP (temp, 0)) == REG)
3777 to_rtx = (replace_equiv_address
3778 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3783 to_rtx = offset_address (to_rtx, offset_rtx,
3784 highest_pow2_factor (offset));
3789 if (GET_CODE (to_rtx) == MEM)
3791 /* When the offset is zero, to_rtx is the address of the
3792 structure we are storing into, and hence may be shared.
3793 We must make a new MEM before setting the volatile bit. */
3795 to_rtx = copy_rtx (to_rtx);
3797 MEM_VOLATILE_P (to_rtx) = 1;
3799 #if 0 /* This was turned off because, when a field is volatile
3800 in an object which is not volatile, the object may be in a register,
3801 and then we would abort over here. */
3807 if (TREE_CODE (to) == COMPONENT_REF
3808 && TREE_READONLY (TREE_OPERAND (to, 1)))
3811 to_rtx = copy_rtx (to_rtx);
3813 RTX_UNCHANGING_P (to_rtx) = 1;
3816 /* Check the access. */
3817 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3822 enum machine_mode best_mode;
3824 best_mode = get_best_mode (bitsize, bitpos,
3825 TYPE_ALIGN (TREE_TYPE (tem)),
3827 if (best_mode == VOIDmode)
3830 best_mode_size = GET_MODE_BITSIZE (best_mode);
3831 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3832 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3833 size *= GET_MODE_SIZE (best_mode);
3835 /* Check the access right of the pointer. */
3836 in_check_memory_usage = 1;
3838 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3839 VOIDmode, 3, to_addr, Pmode,
3840 GEN_INT (size), TYPE_MODE (sizetype),
3841 GEN_INT (MEMORY_USE_WO),
3842 TYPE_MODE (integer_type_node));
3843 in_check_memory_usage = 0;
3846 /* If this is a varying-length object, we must get the address of
3847 the source and do an explicit block move. */
3850 unsigned int from_align;
3851 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3853 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3855 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3863 if (! can_address_p (to))
3865 to_rtx = copy_rtx (to_rtx);
3866 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3869 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3871 /* Spurious cast for HPUX compiler. */
3872 ? ((enum machine_mode)
3873 TYPE_MODE (TREE_TYPE (to)))
3877 int_size_in_bytes (TREE_TYPE (tem)),
3878 get_alias_set (to));
3880 preserve_temp_slots (result);
3884 /* If the value is meaningful, convert RESULT to the proper mode.
3885 Otherwise, return nothing. */
3886 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3887 TYPE_MODE (TREE_TYPE (from)),
3889 TREE_UNSIGNED (TREE_TYPE (to)))
3894 /* If the rhs is a function call and its value is not an aggregate,
3895 call the function before we start to compute the lhs.
3896 This is needed for correct code for cases such as
3897 val = setjmp (buf) on machines where reference to val
3898 requires loading up part of an address in a separate insn.
3900 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3901 since it might be a promoted variable where the zero- or sign- extension
3902 needs to be done. Handling this in the normal way is safe because no
3903 computation is done before the call. */
3904 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3905 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3906 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3907 && GET_CODE (DECL_RTL (to)) == REG))
3912 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3914 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3916 /* Handle calls that return values in multiple non-contiguous locations.
3917 The Irix 6 ABI has examples of this. */
3918 if (GET_CODE (to_rtx) == PARALLEL)
3919 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3920 TYPE_ALIGN (TREE_TYPE (from)));
3921 else if (GET_MODE (to_rtx) == BLKmode)
3922 emit_block_move (to_rtx, value, expr_size (from));
3925 #ifdef POINTERS_EXTEND_UNSIGNED
3926 if (POINTER_TYPE_P (TREE_TYPE (to))
3927 && GET_MODE (to_rtx) != GET_MODE (value))
3928 value = convert_memory_address (GET_MODE (to_rtx), value);
3930 emit_move_insn (to_rtx, value);
3932 preserve_temp_slots (to_rtx);
3935 return want_value ? to_rtx : NULL_RTX;
3938 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3939 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3944 /* Don't move directly into a return register. */
3945 if (TREE_CODE (to) == RESULT_DECL
3946 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3951 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3953 if (GET_CODE (to_rtx) == PARALLEL)
3954 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3955 TYPE_ALIGN (TREE_TYPE (from)));
3957 emit_move_insn (to_rtx, temp);
3959 preserve_temp_slots (to_rtx);
3962 return want_value ? to_rtx : NULL_RTX;
3965 /* In case we are returning the contents of an object which overlaps
3966 the place the value is being stored, use a safe function when copying
3967 a value through a pointer into a structure value return block. */
3968 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3969 && current_function_returns_struct
3970 && !current_function_returns_pcc_struct)
3975 size = expr_size (from);
3976 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3977 EXPAND_MEMORY_USE_DONT);
3979 /* Copy the rights of the bitmap. */
3980 if (current_function_check_memory_usage)
3981 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3982 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3983 XEXP (from_rtx, 0), Pmode,
3984 convert_to_mode (TYPE_MODE (sizetype),
3985 size, TREE_UNSIGNED (sizetype)),
3986 TYPE_MODE (sizetype));
3988 #ifdef TARGET_MEM_FUNCTIONS
3989 emit_library_call (memmove_libfunc, LCT_NORMAL,
3990 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3991 XEXP (from_rtx, 0), Pmode,
3992 convert_to_mode (TYPE_MODE (sizetype),
3993 size, TREE_UNSIGNED (sizetype)),
3994 TYPE_MODE (sizetype));
3996 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3997 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3998 XEXP (to_rtx, 0), Pmode,
3999 convert_to_mode (TYPE_MODE (integer_type_node),
4000 size, TREE_UNSIGNED (integer_type_node)),
4001 TYPE_MODE (integer_type_node));
4004 preserve_temp_slots (to_rtx);
4007 return want_value ? to_rtx : NULL_RTX;
4010 /* Compute FROM and store the value in the rtx we got. */
4013 result = store_expr (from, to_rtx, want_value);
4014 preserve_temp_slots (result);
4017 return want_value ? result : NULL_RTX;
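/* A source-level instance of the rule above (illustrative): the setjmp
   call must be expanded before any part of VAL's address is loaded in a
   separate insn. */
#if 0
#include <setjmp.h>

static jmp_buf buf;
static int val;

void
f (void)
{
  val = setjmp (buf);	/* rhs call expanded before lhs is touched */
}
#endif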
4020 /* Generate code for computing expression EXP,
4021 and storing the value into TARGET.
4022 TARGET may contain a QUEUED rtx.
4024 If WANT_VALUE is nonzero, return a copy of the value
4025 not in TARGET, so that we can be sure to use the proper
4026 value in a containing expression even if TARGET has something
4027 else stored in it. If possible, we copy the value through a pseudo
4028 and return that pseudo. Or, if the value is constant, we try to
4029 return the constant. In some cases, we return a pseudo
4030 copied *from* TARGET.
4032 If the mode is BLKmode then we may return TARGET itself.
4033 It turns out that in BLKmode it doesn't cause a problem,
4034 because C has no operators that could combine two different
4035 assignments into the same BLKmode object with different values
4036 with no sequence point. Will other languages need this to be more thorough?
4039 If WANT_VALUE is 0, we return NULL, to make sure
4040 to catch quickly any cases where the caller uses the value
4041 and fails to set WANT_VALUE. */
4044 store_expr (exp, target, want_value)
4050 int dont_return_target = 0;
4051 int dont_store_target = 0;
4053 if (TREE_CODE (exp) == COMPOUND_EXPR)
4055 /* Perform first part of compound expression, then assign from second part. */
4057 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4059 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4061 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4063 /* For conditional expression, get safe form of the target. Then
4064 test the condition, doing the appropriate assignment on either
4065 side. This avoids the creation of unnecessary temporaries.
4066 For non-BLKmode, it is more efficient not to do this. */
4068 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4071 target = protect_from_queue (target, 1);
4073 do_pending_stack_adjust ();
4075 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4076 start_cleanup_deferral ();
4077 store_expr (TREE_OPERAND (exp, 1), target, 0);
4078 end_cleanup_deferral ();
4080 emit_jump_insn (gen_jump (lab2));
4083 start_cleanup_deferral ();
4084 store_expr (TREE_OPERAND (exp, 2), target, 0);
4085 end_cleanup_deferral ();
4090 return want_value ? target : NULL_RTX;
4092 else if (queued_subexp_p (target))
4093 /* If target contains a postincrement, let's not risk
4094 using it as the place to generate the rhs. */
4096 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4098 /* Expand EXP into a new pseudo. */
4099 temp = gen_reg_rtx (GET_MODE (target));
4100 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4103 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4105 /* If target is volatile, ANSI requires accessing the value
4106 *from* the target, if it is accessed. So make that happen.
4107 In no case return the target itself. */
4108 if (! MEM_VOLATILE_P (target) && want_value)
4109 dont_return_target = 1;
4111 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4120 temp = expand_expr (exp, target, GET_MODE (target), 0);
4121 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4123 /* If TEMP is already in the desired TARGET, only copy it from
4124 memory and don't store it there again. */
4126 || (rtx_equal_p (temp, target)
4127 && ! side_effects_p (temp) && ! side_effects_p (target)))
4128 dont_store_target = 1;
4129 temp = copy_to_reg (temp);
4131 dont_return_target = 1;
4133 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4134 /* If this is a scalar in a register that is stored in a wider mode
4135 than the declared mode, compute the result into its declared mode
4136 and then convert to the wider mode. Our value is the computed expression. */
4139 /* If we don't want a value, we can do the conversion inside EXP,
4140 which will often result in some optimizations. Do the conversion
4141 in two steps: first change the signedness, if needed, then
4142 the extend. But don't do this if the type of EXP is a subtype
4143 of something else since then the conversion might involve
4144 more than just converting modes. */
4145 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4146 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4148 if (TREE_UNSIGNED (TREE_TYPE (exp))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target))
4152 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4156 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4157 SUBREG_PROMOTED_UNSIGNED_P (target)),
4161 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4163 /* If TEMP is a volatile MEM and we want a result value, make
4164 the access now so it gets done only once. Likewise if
4165 it contains TARGET. */
4166 if (GET_CODE (temp) == MEM && want_value
4167 && (MEM_VOLATILE_P (temp)
4168 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4169 temp = copy_to_reg (temp);
4171 /* If TEMP is a VOIDmode constant, use convert_modes to make
4172 sure that we properly convert it. */
4173 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4175 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4176 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4177 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4178 GET_MODE (target), temp,
4179 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 convert_move (SUBREG_REG (target), temp,
4183 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 /* If we promoted a constant, change the mode back down to match
4186 target. Otherwise, the caller might get confused by a result whose
4187 mode is larger than expected. */
4189 if (want_value && GET_MODE (temp) != GET_MODE (target)
4190 && GET_MODE (temp) != VOIDmode)
4192 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4193 SUBREG_PROMOTED_VAR_P (temp) = 1;
4194 SUBREG_PROMOTED_UNSIGNED_P (temp)
4195 = SUBREG_PROMOTED_UNSIGNED_P (target);
4198 return want_value ? temp : NULL_RTX;
4202 temp = expand_expr (exp, target, GET_MODE (target), 0);
4203 /* Return TARGET if it's a specified hardware register.
4204 If TARGET is a volatile mem ref, either return TARGET
4205 or return a reg copied *from* TARGET; ANSI requires this.
4207 Otherwise, if TEMP is not TARGET, return TEMP
4208 if it is constant (for efficiency),
4209 or if we really want the correct value. */
4210 if (!(target && GET_CODE (target) == REG
4211 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4212 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4213 && ! rtx_equal_p (temp, target)
4214 && (CONSTANT_P (temp) || want_value))
4215 dont_return_target = 1;
4218 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4219 the same as that of TARGET, adjust the constant. This is needed, for
4220 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4222 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4223 && TREE_CODE (exp) != ERROR_MARK
4224 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4225 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4226 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4228 if (current_function_check_memory_usage
4229 && GET_CODE (target) == MEM
4230 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4232 in_check_memory_usage = 1;
4233 if (GET_CODE (temp) == MEM)
4234 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4235 VOIDmode, 3, XEXP (target, 0), Pmode,
4236 XEXP (temp, 0), Pmode,
4237 expr_size (exp), TYPE_MODE (sizetype));
4239 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4240 VOIDmode, 3, XEXP (target, 0), Pmode,
4241 expr_size (exp), TYPE_MODE (sizetype),
4242 GEN_INT (MEMORY_USE_WO),
4243 TYPE_MODE (integer_type_node));
4244 in_check_memory_usage = 0;
4247 /* If value was not generated in the target, store it there.
4248 Convert the value to TARGET's type first if necessary. */
4249 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4250 one or both of them are volatile memory refs, we have to distinguish two cases:
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according to ==.
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
4261 if ((! rtx_equal_p (temp, target)
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
4264 && TREE_CODE (exp) != ERROR_MARK
4265 && ! dont_store_target)
{
target = protect_from_queue (target, 1);
4268 if (GET_MODE (temp) != GET_MODE (target)
4269 && GET_MODE (temp) != VOIDmode)
4271 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
if (dont_return_target)
{
4274 /* In this case, we will return TEMP,
4275 so make sure it has the proper mode.
4276 But don't forget to store the value into TARGET. */
4277 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4278 emit_move_insn (target, temp);
}
else
convert_move (target, temp, unsignedp);
}
else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
{
rtx size;
rtx addr;
rtx label = 0;
4286 /* Handle copying a string constant into an array.
4287 The string constant may be shorter than the array.
4288 So copy just the string's actual length, and clear the rest. */
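/* Illustrative example (added commentary, not in the original): given
     char buf[8] = "hi";
   the STRING_CST supplies 3 bytes (the two characters plus the
   terminating NUL), so the block move below copies 3 bytes into BUF
   and the remaining 5 bytes are cleared.  */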
4292 /* Get the size of the data type of the string,
4293 which is actually the size of the target. */
4294 size = expr_size (exp);
4295 if (GET_CODE (size) == CONST_INT
4296 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4297 emit_block_move (target, temp, size);
else
{
/* Compute the size of the data to copy from the string.  */
tree copy_size
= size_binop (MIN_EXPR,
4303 make_tree (sizetype, size),
4304 size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
4309 /* Copy that much. */
4310 emit_block_move (target, temp, copy_size_rtx);
4312 /* Figure out how much is left in TARGET that we have to clear.
4313 Do all calculations in ptr_mode. */
4315 addr = XEXP (target, 0);
4316 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4318 if (GET_CODE (copy_size_rtx) == CONST_INT)
4320 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4321 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4325 addr = force_reg (ptr_mode, addr);
addr = expand_binop (ptr_mode, add_optab, addr,
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);

size = expand_binop (ptr_mode, sub_optab, size,
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
4334 label = gen_label_rtx ();
4335 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4336 GET_MODE (size), 0, 0, label);
4339 if (size != const0_rtx)
4341 rtx dest = gen_rtx_MEM (BLKmode, addr);
4343 MEM_COPY_ATTRIBUTES (dest, target);
4345 /* Be sure we can write on ADDR. */
4346 in_check_memory_usage = 1;
4347 if (current_function_check_memory_usage)
4348 emit_library_call (chkr_check_addr_libfunc,
LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
addr, Pmode,
size, TYPE_MODE (sizetype),
4352 GEN_INT (MEMORY_USE_WO),
4353 TYPE_MODE (integer_type_node));
4354 in_check_memory_usage = 0;
4355 clear_storage (dest, size);
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
4365 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4366 TYPE_ALIGN (TREE_TYPE (exp)));
4367 else if (GET_MODE (temp) == BLKmode)
4368 emit_block_move (target, temp, expr_size (exp));
4370 emit_move_insn (target, temp);
4373 /* If we don't want a value, return NULL_RTX. */
4377 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4378 ??? The latter test doesn't seem to make sense. */
4379 else if (dont_return_target && GET_CODE (temp) != MEM)
/* Return TARGET itself if it is a hard register or BLKmode;
otherwise return a copy of TARGET in a register.  */
4383 else if (want_value && GET_MODE (target) != BLKmode
4384 && ! (GET_CODE (target) == REG
4385 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4386 return copy_to_reg (target);
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
tree exp;
{
tree elt;
4400 switch (TREE_CODE (exp))
4404 case NON_LVALUE_EXPR:
4405 return is_zeros_p (TREE_OPERAND (exp, 0));
4408 return integer_zerop (exp);
return (is_zeros_p (TREE_REALPART (exp))
&& is_zeros_p (TREE_IMAGPART (exp)));
4415 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4418 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4420 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
if (! is_zeros_p (TREE_VALUE (elt)))
return 0;
return 1;

default:
return 0;
}
}
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
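/* Illustrative example (added commentary, not in the original):
   the constructor { 0, 0, 0, 9 } has zeros == 3 and elts == 4;
   since 4 * 3 >= 3 * 4 holds, it counts as mostly zero, and callers
   such as store_constructor will pre-clear the whole object.  */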
static int
mostly_zeros_p (exp)
tree exp;
{
4437 if (TREE_CODE (exp) == CONSTRUCTOR)
4439 int elts = 0, zeros = 0;
4440 tree elt = CONSTRUCTOR_ELTS (exp);
4441 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4443 /* If there are no ranges of true bits, it is all zero. */
4444 return elt == NULL_TREE;
4446 for (; elt; elt = TREE_CHAIN (elt))
4448 /* We do not handle the case where the index is a RANGE_EXPR,
4449 so the statistic will be somewhat inaccurate.
4450 We do make a more accurate count in store_constructor itself,
4451 so since this function is only used for nested array elements,
4452 this should be close enough. */
4453 if (mostly_zeros_p (TREE_VALUE (elt)))
4458 return 4 * zeros >= 3 * elts;
4461 return is_zeros_p (exp);
4464 /* Helper function for store_constructor.
4465 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4466 TYPE is the type of the CONSTRUCTOR, not the element type.
4467 ALIGN and CLEARED are as for store_constructor.
4468 ALIAS_SET is the alias set to use for any stores.
4470 This provides a recursive shortcut back to store_constructor when it isn't
4471 necessary to go through store_field. This is so that we can pass through
4472 the cleared field to let store_constructor know that we may not have to
4473 clear a substructure if the outer structure has already been cleared. */
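/* Illustrative example (added, assumed): for
     struct { int a; struct { int b, c; } in; } v = { 1, { 0, 0 } };
   the inner CONSTRUCTOR { 0, 0 } comes back through here into
   store_constructor with CLEARED still set, so no redundant clearing
   of V.IN is emitted when V has already been zeroed.  */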
static void
store_constructor_field (target, bitsize, bitpos,
mode, exp, type, align, cleared, alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
enum machine_mode mode;
tree exp, type;
unsigned int align;
int cleared;
int alias_set;
{
4487 if (TREE_CODE (exp) == CONSTRUCTOR
4488 && bitpos % BITS_PER_UNIT == 0
4489 /* If we have a non-zero bitpos for a register target, then we just
4490 let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyway.  */
4492 && (bitpos == 0 || GET_CODE (target) == MEM))
4494 if (GET_CODE (target) == MEM)
4496 = adjust_address (target,
4497 GET_MODE (target) == BLKmode
4499 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4500 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4503 /* Show the alignment may no longer be what it was and update the alias
4504 set, if required. */
4506 align = MIN (align, (unsigned int) bitpos & - bitpos);
4508 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4509 && MEM_ALIAS_SET (target) != 0)
4511 target = copy_rtx (target);
4512 set_mem_alias_set (target, alias_set);
4515 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4518 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4519 int_size_in_bytes (type), alias_set);
4522 /* Store the value of constructor EXP into the rtx TARGET.
4523 TARGET is either a REG or a MEM.
4524 ALIGN is the maximum known alignment for TARGET.
CLEARED is true if TARGET is known to have been zeroed.
4526 SIZE is the number of bytes of TARGET we are allowed to modify: this
4527 may not be the same as the size of EXP if we are assigning to a field
4528 which has been packed to exclude padding bits. */
static void
store_constructor (exp, target, align, cleared, size)
tree exp;
rtx target;
unsigned int align;
int cleared;
HOST_WIDE_INT size;
{
4538 tree type = TREE_TYPE (exp);
4539 #ifdef WORD_REGISTER_OPERATIONS
HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4543 /* We know our target cannot conflict, since safe_from_p has been called. */
4545 /* Don't try copying piece by piece into a hard register
4546 since that is vulnerable to being clobbered by EXP.
4547 Instead, construct in a pseudo register and then copy it all. */
4548 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4550 rtx temp = gen_reg_rtx (GET_MODE (target));
4551 store_constructor (exp, temp, align, cleared, size);
4552 emit_move_insn (target, temp);
4557 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
{
tree elt;
4562 /* Inform later passes that the whole union value is dead. */
4563 if ((TREE_CODE (type) == UNION_TYPE
4564 || TREE_CODE (type) == QUAL_UNION_TYPE)
4567 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4569 /* If the constructor is empty, clear the union. */
4570 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4571 clear_storage (target, expr_size (exp));
4574 /* If we are building a static constructor into a register,
4575 set the initial value as zero so we can fold the value into
4576 a constant. But if more than one register is involved,
4577 this probably loses. */
4578 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4579 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
}
4587 /* If the constructor has fewer fields than the structure
4588 or if we are initializing the structure to mostly zeros,
4589 clear the whole structure first. Don't do this if TARGET is a
4590 register whose mode size isn't equal to SIZE since clear_storage
4591 can't handle this case. */
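/* Illustrative example (added, assumed): for
     struct S { int a, b, c; } s = { 1 };
   the constructor supplies only one of the three fields, so all of S
   is cleared first and then 1 is stored into S.A; the untouched
   fields B and C are thereby guaranteed to be zero.  */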
else if (size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
4594 != fields_length (type))
4595 || mostly_zeros_p (exp))
4596 && (GET_CODE (target) != REG
4597 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
{
if (! cleared)
clear_storage (target, GEN_INT (size));
cleared = 1;
}
4605 /* Inform later passes that the old value is dead. */
4606 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4608 /* Store each element of the constructor into
4609 the corresponding field of TARGET. */
4611 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4613 tree field = TREE_PURPOSE (elt);
4614 #ifdef WORD_REGISTER_OPERATIONS
tree value = TREE_VALUE (elt);
#endif
4617 enum machine_mode mode;
4618 HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos = 0;
int unsignedp;
tree offset;
rtx to_rtx = target;
4624 /* Just ignore missing fields.
4625 We cleared the whole structure, above,
if any fields are missing.  */
if (field == 0)
continue;
if (cleared && is_zeros_p (TREE_VALUE (elt)))
continue;
if (host_integerp (DECL_SIZE (field), 1))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
else
bitsize = -1;
4638 unsignedp = TREE_UNSIGNED (field);
4639 mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
4643 offset = DECL_FIELD_OFFSET (field);
4644 if (host_integerp (offset, 0)
4645 && host_integerp (bit_position (field), 0))
{
bitpos = int_bit_position (field);
offset = 0;
}
else
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4657 if (contains_placeholder_p (offset))
4658 offset = build (WITH_RECORD_EXPR, sizetype,
4659 offset, make_tree (TREE_TYPE (exp), target));
4661 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
abort ();
4665 if (GET_MODE (offset_rtx) != ptr_mode)
4666 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4668 #ifdef POINTERS_EXTEND_UNSIGNED
4669 if (GET_MODE (offset_rtx) != Pmode)
4670 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4673 to_rtx = offset_address (to_rtx, offset_rtx,
4674 highest_pow2_factor (offset));
4676 align = DECL_OFFSET_ALIGN (field);
4679 if (TREE_READONLY (field))
4681 if (GET_CODE (to_rtx) == MEM)
4682 to_rtx = copy_rtx (to_rtx);
4684 RTX_UNCHANGING_P (to_rtx) = 1;
4687 #ifdef WORD_REGISTER_OPERATIONS
4688 /* If this initializes a field that is smaller than a word, at the
4689 start of a word, try to widen it to a full word.
4690 This special case allows us to output C++ member function
4691 initializations in a form that the optimizers can understand. */
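/* Illustrative example (added, assumed 32-bit BITS_PER_WORD):
   storing the INTEGER_CST 5 into a 16-bit field at bit 0 of a
   register becomes, on a big-endian target, a full-word store of
   5 << 16 with BITSIZE widened to 32, matching the shift performed
   below for BYTES_BIG_ENDIAN.  */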
4692 if (GET_CODE (target) == REG
4693 && bitsize < BITS_PER_WORD
4694 && bitpos % BITS_PER_WORD == 0
4695 && GET_MODE_CLASS (mode) == MODE_INT
4696 && TREE_CODE (value) == INTEGER_CST
4698 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4700 tree type = TREE_TYPE (value);
4701 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4703 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4704 value = convert (type, value);
if (BYTES_BIG_ENDIAN)
value
= fold (build (LSHIFT_EXPR, type, value,
build_int_2 (BITS_PER_WORD - bitsize, 0)));
bitsize = BITS_PER_WORD;
mode = word_mode;
}
#endif
4715 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4716 && DECL_NONADDRESSABLE_P (field))
4718 to_rtx = copy_rtx (to_rtx);
4719 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4722 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4723 TREE_VALUE (elt), type, align, cleared,
4724 get_alias_set (TREE_TYPE (field)));
else if (TREE_CODE (type) == ARRAY_TYPE)
{
tree elt;
int i;
int need_to_clear;
4732 tree domain = TYPE_DOMAIN (type);
4733 tree elttype = TREE_TYPE (type);
4734 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4735 && TYPE_MAX_VALUE (domain)
4736 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4737 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4738 HOST_WIDE_INT minelt = 0;
4739 HOST_WIDE_INT maxelt = 0;
4741 /* If we have constant bounds for the range of the type, get them. */
4744 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4745 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4748 /* If the constructor has fewer elements than the array,
clear the whole array first.  Similarly if this is a
static constructor of a non-BLKmode object.  */
if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
need_to_clear = 1;
else
{
4755 HOST_WIDE_INT count = 0, zero_count = 0;
4756 need_to_clear = ! const_bounds_p;
4758 /* This loop is a more accurate version of the loop in
4759 mostly_zeros_p (it handles RANGE_EXPR in an index).
4760 It is also needed to check for missing elements. */
4761 for (elt = CONSTRUCTOR_ELTS (exp);
4762 elt != NULL_TREE && ! need_to_clear;
4763 elt = TREE_CHAIN (elt))
4765 tree index = TREE_PURPOSE (elt);
4766 HOST_WIDE_INT this_node_count;
4768 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4770 tree lo_index = TREE_OPERAND (index, 0);
4771 tree hi_index = TREE_OPERAND (index, 1);
4773 if (! host_integerp (lo_index, 1)
|| ! host_integerp (hi_index, 1))
{
need_to_clear = 1;
break;
}
4780 this_node_count = (tree_low_cst (hi_index, 1)
4781 - tree_low_cst (lo_index, 1) + 1);
else
this_node_count = 1;
4786 count += this_node_count;
4787 if (mostly_zeros_p (TREE_VALUE (elt)))
4788 zero_count += this_node_count;
4791 /* Clear the entire array first if there are any missing elements,
4792 or if the incidence of zero elements is >= 75%. */
if (! need_to_clear
&& (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
4798 if (need_to_clear && size > 0)
{
if (! cleared)
clear_storage (target, GEN_INT (size));
cleared = 1;
}
4804 else if (REG_P (target))
4805 /* Inform later passes that the old value is dead. */
4806 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4808 /* Store each element of the constructor into
4809 the corresponding element of TARGET, determined
4810 by counting the elements. */
for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
elt;
elt = TREE_CHAIN (elt), i++)
4815 enum machine_mode mode;
4816 HOST_WIDE_INT bitsize;
4817 HOST_WIDE_INT bitpos;
4819 tree value = TREE_VALUE (elt);
4820 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4821 tree index = TREE_PURPOSE (elt);
4822 rtx xtarget = target;
if (cleared && is_zeros_p (value))
continue;
4827 unsignedp = TREE_UNSIGNED (elttype);
4828 mode = TYPE_MODE (elttype);
4829 if (mode == BLKmode)
4830 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
? tree_low_cst (TYPE_SIZE (elttype), 1)
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
4836 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4838 tree lo_index = TREE_OPERAND (index, 0);
4839 tree hi_index = TREE_OPERAND (index, 1);
4840 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4841 struct nesting *loop;
4842 HOST_WIDE_INT lo, hi, count;
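/* Illustrative example (added; GNU C designated-range syntax):
     int a[5] = { [0 ... 3] = 7 };
   arrives here with a RANGE_EXPR index whose bounds are 0 and 3;
   COUNT == 4 is small, so the stores are unrolled below instead of
   emitting a runtime loop.  */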
/* If the range is constant and "small", unroll the loop.  */
if (const_bounds_p
&& host_integerp (lo_index, 0)
&& host_integerp (hi_index, 0)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
count = hi - lo + 1,
(GET_CODE (target) != MEM
|| count <= 2
|| (host_integerp (TYPE_SIZE (elttype), 1)
&& (tree_low_cst (TYPE_SIZE (elttype), 1) * count
<= 40 * 8)))))
{
4858 lo -= minelt; hi -= minelt;
4859 for (; lo <= hi; lo++)
4861 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4863 if (GET_CODE (target) == MEM
4864 && !MEM_KEEP_ALIAS_SET_P (target)
4865 && TYPE_NONALIASED_COMPONENT (type))
4867 target = copy_rtx (target);
4868 MEM_KEEP_ALIAS_SET_P (target) = 1;
4871 store_constructor_field
4872 (target, bitsize, bitpos, mode, value, type, align,
4873 cleared, get_alias_set (elttype));
4878 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4879 loop_top = gen_label_rtx ();
4880 loop_end = gen_label_rtx ();
4882 unsignedp = TREE_UNSIGNED (domain);
4884 index = build_decl (VAR_DECL, NULL_TREE, domain);
= gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
&unsignedp, 0));
4889 SET_DECL_RTL (index, index_r);
4890 if (TREE_CODE (value) == SAVE_EXPR
4891 && SAVE_EXPR_RTL (value) == 0)
/* Make sure value gets expanded once before the
loop.  */
4895 expand_expr (value, const0_rtx, VOIDmode, 0);
4898 store_expr (lo_index, index_r, 0);
4899 loop = expand_start_loop (0);
4901 /* Assign value to element index. */
position
= convert (ssizetype,
4904 fold (build (MINUS_EXPR, TREE_TYPE (index),
4905 index, TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, position,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
4910 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4911 xtarget = offset_address (target, pos_rtx,
4912 highest_pow2_factor (position));
4913 xtarget = adjust_address (xtarget, mode, 0);
4914 if (TREE_CODE (value) == CONSTRUCTOR)
4915 store_constructor (value, xtarget, align, cleared,
4916 bitsize / BITS_PER_UNIT);
4918 store_expr (value, xtarget, 0);
expand_exit_loop_if_false (loop,
build (LT_EXPR, integer_type_node,
index, hi_index));
expand_increment (build (PREINCREMENT_EXPR,
TREE_TYPE (index),
index, integer_one_node), 0, 0);
expand_end_loop ();
emit_label (loop_end);
4931 else if ((index != 0 && ! host_integerp (index, 0))
4932 || ! host_integerp (TYPE_SIZE (elttype), 1))
if (index == 0)
index = ssize_int (1);
else
index = convert (ssizetype,
4941 fold (build (MINUS_EXPR, index,
4942 TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, index,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
4947 xtarget = offset_address (target,
4948 expand_expr (position, 0, VOIDmode, 0),
4949 highest_pow2_factor (position));
4950 xtarget = adjust_address (xtarget, mode, 0);
4951 store_expr (value, xtarget, 0);
if (index != 0)
bitpos = ((tree_low_cst (index, 0) - minelt)
* tree_low_cst (TYPE_SIZE (elttype), 1));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4961 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4962 && TYPE_NONALIASED_COMPONENT (type))
4964 target = copy_rtx (target);
4965 MEM_KEEP_ALIAS_SET_P (target) = 1;
4968 store_constructor_field (target, bitsize, bitpos, mode, value,
4969 type, align, cleared,
4970 get_alias_set (elttype));
4976 /* Set constructor assignments. */
else if (TREE_CODE (type) == SET_TYPE)
{
4979 tree elt = CONSTRUCTOR_ELTS (exp);
4980 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4981 tree domain = TYPE_DOMAIN (type);
4982 tree domain_min, domain_max, bitlength;
4984 /* The default implementation strategy is to extract the constant
4985 parts of the constructor, use that to initialize the target,
4986 and then "or" in whatever non-constant ranges we need in addition.
4988 If a large set is all zero or all ones, it is
4989 probably better to set it using memset (if available) or bzero.
Also, if a large set has just a single range, it may also be
better to first clear the whole set (using bzero/memset) and
then set the bits we want.  */
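/* Illustrative example (added commentary): with SET_WORD_SIZE == 8
   and constant members { 1, 3 }, get_set_constructor_bits marks bits
   1 and 3, so the loop below accumulates the byte 0x0A
   ((1 << 1) | (1 << 3)) on a little-endian target before it is
   stored with emit_move_insn.  */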
4994 /* Check for all zeros. */
4995 if (elt == NULL_TREE && size > 0)
{
if (! cleared)
clear_storage (target, GEN_INT (size));
return;
}
5002 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5003 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5004 bitlength = size_binop (PLUS_EXPR,
size_diffop (domain_max, domain_min),
ssize_int (1));
5008 nbits = tree_low_cst (bitlength, 1);
5010 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5011 are "complicated" (more than one range), initialize (the
5012 constant parts) by copying from a constant. */
5013 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5014 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5016 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5017 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5018 char *bit_buffer = (char *) alloca (nbits);
5019 HOST_WIDE_INT word = 0;
5020 unsigned int bit_pos = 0;
5021 unsigned int ibit = 0;
5022 unsigned int offset = 0; /* In bytes from beginning of set. */
elt = get_set_constructor_bits (exp, bit_buffer, nbits);
for (;;)
{
5027 if (bit_buffer[ibit])
5029 if (BYTES_BIG_ENDIAN)
5030 word |= (1 << (set_word_size - 1 - bit_pos));
else
word |= 1 << bit_pos;

bit_pos++;  ibit++;
5036 if (bit_pos >= set_word_size || ibit == nbits)
5038 if (word != 0 || ! cleared)
rtx datum = GEN_INT (word);
rtx to_rtx;
5043 /* The assumption here is that it is safe to use
5044 XEXP if the set is multi-word, but not if
5045 it's single-word. */
5046 if (GET_CODE (target) == MEM)
5047 to_rtx = adjust_address (target, mode, offset);
else if (offset == 0)
to_rtx = target;
else
abort ();
5052 emit_move_insn (to_rtx, datum);
if (ibit == nbits)
break;
word = 0;
bit_pos = 0;
offset += set_word_size / BITS_PER_UNIT;
5064 /* Don't bother clearing storage if the set is all ones. */
5065 if (TREE_CHAIN (elt) != NULL_TREE
|| (TREE_PURPOSE (elt) == NULL_TREE
? nbits != 1
: ( ! host_integerp (TREE_VALUE (elt), 0)
5069 || ! host_integerp (TREE_PURPOSE (elt), 0)
5070 || (tree_low_cst (TREE_VALUE (elt), 0)
5071 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5072 != (HOST_WIDE_INT) nbits))))
5073 clear_storage (target, expr_size (exp));
5075 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5077 /* Start of range of element or NULL. */
5078 tree startbit = TREE_PURPOSE (elt);
5079 /* End of range of element, or element value. */
5080 tree endbit = TREE_VALUE (elt);
5081 #ifdef TARGET_MEM_FUNCTIONS
HOST_WIDE_INT startb, endb;
#endif
5084 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5086 bitlength_rtx = expand_expr (bitlength,
5087 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5089 /* Handle non-range tuple element like [ expr ]. */
5090 if (startbit == NULL_TREE)
startbit = save_expr (endbit);
endbit = startbit;
}
5096 startbit = convert (sizetype, startbit);
5097 endbit = convert (sizetype, endbit);
5098 if (! integer_zerop (domain_min))
5100 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5101 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5103 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5104 EXPAND_CONST_ADDRESS);
5105 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5106 EXPAND_CONST_ADDRESS);
if (REG_P (target))
{
targetx
= assign_temp
((build_qualified_type (type_for_mode (GET_MODE (target), 0),
TYPE_QUAL_CONST)),
0, 1, 1);
emit_move_insn (targetx, target);
}
else if (GET_CODE (target) == MEM)
targetx = target;
else
abort ();
5123 #ifdef TARGET_MEM_FUNCTIONS
5124 /* Optimization: If startbit and endbit are
5125 constants divisible by BITS_PER_UNIT,
5126 call memset instead. */
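/* Illustrative numbers (added commentary): startbit 8 and endbit 23
   give startb == 8 and endb == 24, both divisible by BITS_PER_UNIT
   on an 8-bit-byte target, so this becomes a memset of
   (24 - 8) / 8 == 2 bytes at byte offset 1.  */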
5127 if (TREE_CODE (startbit) == INTEGER_CST
5128 && TREE_CODE (endbit) == INTEGER_CST
5129 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5130 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
emit_library_call (memset_libfunc, LCT_NORMAL,
VOIDmode, 3,
plus_constant (XEXP (targetx, 0),
startb / BITS_PER_UNIT),
Pmode,
5137 constm1_rtx, TYPE_MODE (integer_type_node),
5138 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5139 TYPE_MODE (sizetype));
else
#endif
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5144 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5145 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5146 startbit_rtx, TYPE_MODE (sizetype),
5147 endbit_rtx, TYPE_MODE (sizetype));
if (REG_P (target))
emit_move_insn (target, targetx);
5158 /* Store the value of EXP (an expression tree)
5159 into a subfield of TARGET which has mode MODE and occupies
5160 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5161 If MODE is VOIDmode, it means that we are storing into a bit-field.
5163 If VALUE_MODE is VOIDmode, return nothing in particular.
5164 UNSIGNEDP is not used in this case.
5166 Otherwise, return an rtx for the value stored. This rtx
5167 has mode VALUE_MODE if that is convenient to do.
5168 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5170 ALIGN is the alignment that TARGET is known to have.
5171 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5173 ALIAS_SET is the alias set for the destination. This value will
5174 (in general) be different from that for TARGET, since TARGET is a
5175 reference to the containing structure. */
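/* Illustrative example (added, assumed): expanding
     struct { unsigned x : 3; } s;  s.x = 5;
   enters store_field with BITSIZE == 3, BITPOS == 0 and
   MODE == VOIDmode, so the assignment is performed by
   store_bit_field rather than an ordinary memory store.  */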
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
unsignedp, align, total_size, alias_set)
rtx target;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
enum machine_mode mode;
tree exp;
enum machine_mode value_mode;
int unsignedp;
unsigned int align;
HOST_WIDE_INT total_size;
int alias_set;
{
5191 HOST_WIDE_INT width_mask = 0;
if (TREE_CODE (exp) == ERROR_MARK)
return const0_rtx;

/* If we have nothing to store, do nothing unless the expression has
side-effects.  */
if (bitsize == 0)
return expand_expr (exp, const0_rtx, VOIDmode, 0);
5201 if (bitsize < HOST_BITS_PER_WIDE_INT)
5202 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5204 /* If we are storing into an unaligned field of an aligned union that is
5205 in a register, we may have the mode of TARGET being an integer mode but
5206 MODE == BLKmode. In that case, get an aligned object whose size and
5207 alignment are the same as TARGET and store TARGET into it (we can avoid
5208 the store if the field being stored is the entire width of TARGET). Then
5209 call ourselves recursively to store the field into a BLKmode version of
5210 that object. Finally, load from the object into TARGET. This is not
5211 very efficient in general, but should only be slightly more expensive
5212 than the otherwise-required unaligned accesses. Perhaps this can be
5213 cleaned up later. */
if (mode == BLKmode
&& (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
{
rtx object
= assign_temp
(build_qualified_type (type_for_mode (GET_MODE (target), 0),
TYPE_QUAL_CONST),
0, 1, 1);
5223 rtx blk_object = copy_rtx (object);
5225 PUT_MODE (blk_object, BLKmode);
5226 set_mem_alias_set (blk_object, 0);
5228 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5229 emit_move_insn (object, target);
5231 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5232 align, total_size, alias_set);
5234 /* Even though we aren't returning target, we need to
5235 give it the updated value. */
emit_move_insn (target, object);

return blk_object;
}
5241 if (GET_CODE (target) == CONCAT)
5243 /* We're storing into a struct containing a single __complex. */
if (bitpos != 0)
abort ();
return store_expr (exp, target, 0);
}
5250 /* If the structure is in a register or if the component
5251 is a bit field, we cannot use addressing to access it.
5252 Use bit-field techniques or SUBREG to store in it. */
5254 if (mode == VOIDmode
5255 || (mode != BLKmode && ! direct_store[(int) mode]
5256 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5257 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5258 || GET_CODE (target) == REG
5259 || GET_CODE (target) == SUBREG
5260 /* If the field isn't aligned enough to store as an ordinary memref,
5261 store it as a bit field. */
5262 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5263 && (align < GET_MODE_ALIGNMENT (mode)
5264 || bitpos % GET_MODE_ALIGNMENT (mode)))
5265 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5266 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5267 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5268 /* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
operations.  */
|| (bitsize >= 0
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5273 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5275 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5277 /* If BITSIZE is narrower than the size of the type of EXP
5278 we will be narrowing TEMP. Normally, what's wanted are the
low-order bits.  However, if EXP's type is a record and this is
a big-endian machine, we want the upper BITSIZE bits.  */
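/* Illustrative numbers (added commentary): narrowing a 32-bit
   record value to a 24-bit field on a big-endian target shifts
   TEMP right by 32 - 24 == 8 bits, so the wanted upper bits end
   up at the low-order end before the store.  */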
5281 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5282 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5283 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5284 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
size_int (GET_MODE_BITSIZE (GET_MODE (temp))
- bitsize),
temp, 1);
/* Unless MODE is VOIDmode or BLKmode, convert TEMP to
MODE.  */
5291 if (mode != VOIDmode && mode != BLKmode
5292 && mode != TYPE_MODE (TREE_TYPE (exp)))
5293 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5295 /* If the modes of TARGET and TEMP are both BLKmode, both
5296 must be in memory and BITPOS must be aligned on a byte
5297 boundary. If so, we simply do a block copy. */
5298 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5300 unsigned int exp_align = expr_align (exp);
5302 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
5306 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5308 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5309 align = MIN (exp_align, align);
5311 /* Find an alignment that is consistent with the bit position. */
while ((bitpos % align) != 0)
align >>= 1;
5315 emit_block_move (target, temp,
5316 bitsize == -1 ? expr_size (exp)
: GEN_INT ((bitsize + BITS_PER_UNIT - 1)
/ BITS_PER_UNIT));
5320 return value_mode == VOIDmode ? const0_rtx : target;
5323 /* Store the value in the bitfield. */
5324 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5325 if (value_mode != VOIDmode)
5327 /* The caller wants an rtx for the value. */
5328 /* If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
tree count;
enum machine_mode tmode;

if (unsignedp)
return expand_and (temp,
GEN_INT
(trunc_int_for_mode
(width_mask,
GET_MODE (temp) == VOIDmode
? value_mode
: GET_MODE (temp))), NULL_RTX);
5343 tmode = GET_MODE (temp);
if (tmode == VOIDmode)
tmode = value_mode;
5346 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5347 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5348 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
}

return extract_bit_field (target, bitsize, bitpos, unsignedp,
NULL_RTX, value_mode, 0, align,
total_size);
}

return const0_rtx;
}
else
{
rtx addr = XEXP (target, 0);
rtx to_rtx;
5361 /* If a value is wanted, it must be the lhs;
5362 so make the address stable for multiple use. */
5364 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5365 && ! CONSTANT_ADDRESS_P (addr)
5366 /* A frame-pointer reference is already stable. */
5367 && ! (GET_CODE (addr) == PLUS
5368 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5369 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5370 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5371 target = replace_equiv_address (target, copy_to_reg (addr));
5373 /* Now build a reference to just the desired component. */
5375 to_rtx = copy_rtx (adjust_address (target, mode,
5376 bitpos / BITS_PER_UNIT));
5378 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5379 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5381 to_rtx = copy_rtx (to_rtx);
5382 set_mem_alias_set (to_rtx, alias_set);
5385 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5389 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5390 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5391 codes and find the ultimate containing object, which we return.
5393 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5394 bit position, and *PUNSIGNEDP to the signedness of the field.
5395 If the position of the field is variable, we store a tree
5396 giving the variable offset (in units) in *POFFSET.
5397 This offset is in addition to the bit position.
5398 If the position is not variable, we store 0 in *POFFSET.
5399 We set *PALIGNMENT to the alignment of the address that will be
5400 computed. This is the alignment of the thing we return if *POFFSET
is zero, but can be less strictly aligned if *POFFSET is nonzero.
5403 If any of the extraction expressions is volatile,
5404 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5406 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
is a mode that can be used to access the field.  In that case, *PBITSIZE
is redundant.
5410 If the field describes a variable-sized object, *PMODE is set to
5411 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5412 this case, but the address of the object can be found. */
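/* Illustrative example (added, assumed): for
     struct S { int i; char c[10]; } *p;
   the reference p->c[2] arrives as an ARRAY_REF of a COMPONENT_REF;
   get_inner_reference peels both levels and returns the INDIRECT_REF
   of P, with *PBITPOS set to the constant bit offset of c[2] inside
   the structure and *POFFSET set to 0.  */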
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
punsignedp, pvolatilep, palignment)
tree exp;
HOST_WIDE_INT *pbitsize;
HOST_WIDE_INT *pbitpos;
tree *poffset;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
unsigned int *palignment;
{
tree size_tree = 0;
5427 enum machine_mode mode = VOIDmode;
5428 tree offset = size_zero_node;
5429 tree bit_offset = bitsize_zero_node;
5430 unsigned int alignment = BIGGEST_ALIGNMENT;
tree placeholder_ptr = 0;
tree tem;
5434 /* First get the mode, signedness, and size. We do this from just the
5435 outermost expression. */
5436 if (TREE_CODE (exp) == COMPONENT_REF)
5438 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5439 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5440 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5442 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5444 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5446 size_tree = TREE_OPERAND (exp, 1);
5447 *punsignedp = TREE_UNSIGNED (exp);
5451 mode = TYPE_MODE (TREE_TYPE (exp));
5452 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5454 if (mode == BLKmode)
size_tree = TYPE_SIZE (TREE_TYPE (exp));
else
*pbitsize = GET_MODE_BITSIZE (mode);
5462 if (! host_integerp (size_tree, 1))
mode = BLKmode, *pbitsize = -1;
else
*pbitsize = tree_low_cst (size_tree, 1);
5468 /* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object.  */
while (1)
{
5472 if (TREE_CODE (exp) == BIT_FIELD_REF)
5473 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5474 else if (TREE_CODE (exp) == COMPONENT_REF)
5476 tree field = TREE_OPERAND (exp, 1);
5477 tree this_offset = DECL_FIELD_OFFSET (field);
5479 /* If this field hasn't been filled in yet, don't go
5480 past it. This should only happen when folding expressions
5481 made during type construction. */
if (this_offset == 0)
break;
5484 else if (! TREE_CONSTANT (this_offset)
5485 && contains_placeholder_p (this_offset))
5486 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5488 offset = size_binop (PLUS_EXPR, offset, this_offset);
5489 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5490 DECL_FIELD_BIT_OFFSET (field));
5492 if (! host_integerp (offset, 0))
5493 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5496 else if (TREE_CODE (exp) == ARRAY_REF
5497 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5499 tree index = TREE_OPERAND (exp, 1);
5500 tree array = TREE_OPERAND (exp, 0);
5501 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5502 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5503 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5505 /* We assume all arrays have sizes that are a multiple of a byte.
5506 First subtract the lower bound, if any, in the type of the
index, then convert to sizetype and multiply by the size of the
element.  */
5509 if (low_bound != 0 && ! integer_zerop (low_bound))
index = fold (build (MINUS_EXPR, TREE_TYPE (index),
index, low_bound));
5513 /* If the index has a self-referential type, pass it to a
WITH_RECORD_EXPR; if the component size does, pass our
5515 component to one. */
5516 if (! TREE_CONSTANT (index)
5517 && contains_placeholder_p (index))
5518 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5519 if (! TREE_CONSTANT (unit_size)
5520 && contains_placeholder_p (unit_size))
5521 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5523 offset = size_binop (PLUS_EXPR, offset,
5524 size_binop (MULT_EXPR,
convert (sizetype, index),
unit_size));
}
5529 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5531 tree new = find_placeholder (exp, &placeholder_ptr);
5533 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5534 We might have been called from tree optimization where we
haven't set up an object yet.  */
if (new == 0)
break;

exp = new;
continue;
}
5543 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5544 && ! ((TREE_CODE (exp) == NOP_EXPR
5545 || TREE_CODE (exp) == CONVERT_EXPR)
5546 && (TYPE_MODE (TREE_TYPE (exp))
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
break;
5550 /* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;
5554 /* If the offset is non-constant already, then we can't assume any
5555 alignment more than the alignment here. */
5556 if (! TREE_CONSTANT (offset))
5557 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
exp = TREE_OPERAND (exp, 0);
}

if (DECL_P (exp))
alignment = MIN (alignment, DECL_ALIGN (exp));
5564 else if (TREE_TYPE (exp) != 0)
5565 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5567 /* If OFFSET is constant, see if we can return the whole thing as a
5568 constant bit position. Otherwise, split it up. */
5569 if (host_integerp (offset, 0)
&& 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
bitsize_unit_node))
5572 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5573 && host_integerp (tem, 0))
*pbitpos = tree_low_cst (tem, 0), *poffset = 0;
else
*pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
*pmode = mode;
*palignment = alignment;
return exp;
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5585 static enum memory_use_mode
5586 get_memory_usage_from_modifier (modifier)
5587 enum expand_modifier modifier;
5593 return MEMORY_USE_RO;
5595 case EXPAND_MEMORY_USE_WO:
5596 return MEMORY_USE_WO;
5598 case EXPAND_MEMORY_USE_RW:
5599 return MEMORY_USE_RW;
5601 case EXPAND_MEMORY_USE_DONT:
5602 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5603 MEMORY_USE_DONT, because they are modifiers to a call of
5604 expand_expr in the ADDR_EXPR case of expand_expr. */
5605 case EXPAND_CONST_ADDRESS:
5606 case EXPAND_INITIALIZER:
5607 return MEMORY_USE_DONT;
case EXPAND_MEMORY_USE_BAD:
default:
abort ();
}
}
5614 /* Given an rtx VALUE that may contain additions and multiplications, return
5615 an equivalent value that just refers to a register, memory, or constant.
5616 This is done by generating instructions to perform the arithmetic and
5617 returning a pseudo-register containing the value.
5619 The returned value may be a REG, SUBREG, MEM or constant. */
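/* Illustrative usage (added, assumed): given pseudo registers R1
   and R2,
     force_operand (gen_rtx_PLUS (SImode, r1, r2), NULL_RTX);
   emits an add insn and returns a pseudo holding R1 + R2, whereas
   passing a VALUE that is already a plain register or constant
   returns it unchanged.  */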
rtx
force_operand (value, target)
rtx value, target;
{
optab binoptab = 0;
/* Use a temporary to force order of execution of calls to
`force_operand'.  */
rtx tmp;
rtx op2;
5630 /* Use subtarget as the target for operand 0 of a binary operation. */
5631 rtx subtarget = get_subtarget (target);
5633 /* Check for a PIC address load. */
if (flag_pic
&& (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5636 && XEXP (value, 0) == pic_offset_table_rtx
5637 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5638 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5639 || GET_CODE (XEXP (value, 1)) == CONST))
if (!subtarget)
subtarget = gen_reg_rtx (GET_MODE (value));
emit_move_insn (subtarget, value);
return subtarget;
}
5647 if (GET_CODE (value) == PLUS)
5648 binoptab = add_optab;
5649 else if (GET_CODE (value) == MINUS)
5650 binoptab = sub_optab;
else if (GET_CODE (value) == MULT)
{
op2 = XEXP (value, 1);
if (!CONSTANT_P (op2)
&& !(GET_CODE (op2) == REG && op2 != subtarget))
subtarget = 0;
tmp = force_operand (XEXP (value, 0), subtarget);
return expand_mult (GET_MODE (value), tmp,
force_operand (op2, NULL_RTX),
target, 1);
}
if (binoptab)
{
op2 = XEXP (value, 1);
if (!CONSTANT_P (op2)
&& !(GET_CODE (op2) == REG && op2 != subtarget))
subtarget = 0;
if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
{
binoptab = add_optab;
op2 = negate_rtx (GET_MODE (value), op2);
}
5675 /* Check for an addition with OP2 a constant integer and our first
5676 operand a PLUS of a virtual register and something else. In that
5677 case, we want to emit the sum of the virtual register and the
5678 constant first and then add the other value. This allows virtual
5679 register instantiation to simply modify the constant rather than
5680 creating another one around this addition. */
5681 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5682 && GET_CODE (XEXP (value, 0)) == PLUS
5683 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5684 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5685 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5687 rtx temp = expand_binop (GET_MODE (value), binoptab,
5688 XEXP (XEXP (value, 0), 0), op2,
5689 subtarget, 0, OPTAB_LIB_WIDEN);
5690 return expand_binop (GET_MODE (value), binoptab, temp,
5691 force_operand (XEXP (XEXP (value, 0), 1), 0),
5692 target, 0, OPTAB_LIB_WIDEN);
5695 tmp = force_operand (XEXP (value, 0), subtarget);
5696 return expand_binop (GET_MODE (value), binoptab, tmp,
5697 force_operand (op2, NULL_RTX),
5698 target, 0, OPTAB_LIB_WIDEN);
5699 /* We give UNSIGNEDP = 0 to expand_binop
5700 because the only operations we are expanding here are signed ones. */
5705 /* Subroutine of expand_expr: return nonzero iff there is no way that
5706 EXP can reference X, which is being modified. TOP_P is nonzero if this
5707 call is going to be used to determine whether we need a temporary
5708 for EXP, as opposed to a recursive call to this function.
5710 It is always safe for this routine to return zero since it merely
5711 searches for optimization opportunities. */
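/* Illustrative example (added, assumed): when expanding
     *p = *p + 1;
   with X the MEM for *P, the right-hand side contains an
   INDIRECT_REF whose alias set can conflict with X, so safe_from_p
   returns 0 and the caller computes the sum into a temporary
   before storing it.  */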
static int
safe_from_p (x, exp, top_p)
rtx x;
tree exp;
int top_p;
{
rtx exp_rtl = 0;
int i, nops;
5721 static tree save_expr_list;
if (x == 0
/* If EXP has varying size, we MUST use a target since we currently
5725 have no way of allocating temporaries of variable size
5726 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5727 So we assume here that something at a higher level has prevented a
5728 clash. This is somewhat bogus, but the best we can do. Only
5729 do this when X is BLKmode and when we are at the top level. */
5730 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5731 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5732 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5733 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
|| TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
!= INTEGER_CST)
5736 && GET_MODE (x) == BLKmode)
5737 /* If X is in the outgoing argument area, it is always safe. */
5738 || (GET_CODE (x) == MEM
5739 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5740 || (GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
return 1;
5744 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5745 find the underlying pseudo. */
if (GET_CODE (x) == SUBREG)
{
x = SUBREG_REG (x);
if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
return 0;
}
5753 /* A SAVE_EXPR might appear many times in the expression passed to the
5754 top-level safe_from_p call, and if it has a complex subexpression,
5755 examining it multiple times could result in a combinatorial explosion.
5756 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5757 with optimization took about 28 minutes to compile -- even though it was
5758 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5759 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5760 we have processed. Note that the only test of top_p was above. */
if (top_p)
{
int rtn;
tree t;

save_expr_list = 0;

rtn = safe_from_p (x, exp, 0);
5771 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

return rtn;
}
5777 /* Now look at our tree code and possibly recurse. */
5778 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
{
case 'd':
exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
break;
5788 if (TREE_CODE (exp) == TREE_LIST)
5789 return ((TREE_VALUE (exp) == 0
5790 || safe_from_p (x, TREE_VALUE (exp), 0))
5791 && (TREE_CHAIN (exp) == 0
5792 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5793 else if (TREE_CODE (exp) == ERROR_MARK)
return 1;	/* An already-visited SAVE_EXPR? */
else
return 0;

case '1':
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
case '2':
case '<':
return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5804 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5808 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5809 the expression. If it is set, we conflict iff we are that rtx or
5810 both are in memory. Otherwise, we check all operands of the
5811 expression recursively. */
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
5816 /* If the operand is static or we are static, we can't conflict.
5817 Likewise if we don't conflict with the operand at all. */
5818 if (staticp (TREE_OPERAND (exp, 0))
5819 || TREE_STATIC (exp)
|| safe_from_p (x, TREE_OPERAND (exp, 0), 0))
return 1;
/* Otherwise, the only way this can conflict is if we are taking
the address of a DECL whose address is part of X, which is
very rare.  */
exp = TREE_OPERAND (exp, 0);
if (DECL_P (exp))
{
5829 if (!DECL_RTL_SET_P (exp)
|| GET_CODE (DECL_RTL (exp)) != MEM)
return 0;
else
5833 exp_rtl = XEXP (DECL_RTL (exp), 0);
}
break;

case INDIRECT_REF:
if (GET_CODE (x) == MEM
&& alias_sets_conflict_p (MEM_ALIAS_SET (x),
get_alias_set (exp)))
return 0;
break;
case CALL_EXPR:
/* Assume that the call will clobber all hard registers and
all of memory.  */
if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
|| GET_CODE (x) == MEM)
return 0;
break;
case RTL_EXPR:
/* If a sequence exists, we would have to scan every instruction
in the sequence to see if it was safe.  This is probably not
worthwhile.  */
if (RTL_EXPR_SEQUENCE (exp))
return 0;

exp_rtl = RTL_EXPR_RTL (exp);
break;
5862 case WITH_CLEANUP_EXPR:
exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
break;
5866 case CLEANUP_POINT_EXPR:
5867 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
case SAVE_EXPR:
exp_rtl = SAVE_EXPR_RTL (exp);
if (exp_rtl)
break;
5874 /* If we've already scanned this, don't do it again. Otherwise,
show we've scanned it and record for clearing the flag if we're
going on.  */
if (TREE_PRIVATE (exp))
return 1;

TREE_PRIVATE (exp) = 1;
if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
{
TREE_PRIVATE (exp) = 0;
return 0;
}

save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
return 1;
case BIND_EXPR:
/* The only operand we look at is operand 1.  The rest aren't
5892 part of the expression. */
5893 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5895 case METHOD_CALL_EXPR:
/* This takes an rtx argument, but shouldn't appear here.  */
abort ();

default:
break;
}

/* If we have an rtx, we do not need to scan our operands.  */
if (exp_rtl)
break;
5907 nops = first_rtl_op (TREE_CODE (exp));
5908 for (i = 0; i < nops; i++)
5909 if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
return 0;
5913 /* If this is a language-specific tree code, it may require
5914 special handling. */
5915 if ((unsigned int) TREE_CODE (exp)
5916 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
&& lang_safe_from_p
&& !(*lang_safe_from_p) (x, exp))
return 0;
}
/* If we have an rtl, find any enclosed object.  Then see if we conflict
with it.  */
if (exp_rtl)
{
if (GET_CODE (exp_rtl) == SUBREG)
{
5928 exp_rtl = SUBREG_REG (exp_rtl);
5929 if (GET_CODE (exp_rtl) == REG
&& REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
return 0;
}
5934 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5935 are memory and they conflict. */
5936 return ! (rtx_equal_p (x, exp_rtl)
5937 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5938 && true_dependence (exp_rtl, GET_MODE (x), x,
rtx_addr_varies_p)));
}

/* If we reach here, it is safe.  */
return 1;
}
5946 /* Subroutine of expand_expr: return rtx if EXP is a
5947 variable or parameter; else return 0. */
static rtx
var_rtx (exp)
tree exp;
{
STRIP_NOPS (exp);
switch (TREE_CODE (exp))
{
case PARM_DECL:
case VAR_DECL:
return DECL_RTL (exp);
default:
return 0;
}
}
5964 #ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
tree exp;
{
5970 enum tree_code code;
5971 enum machine_mode mode;
/* Strip any NOPs that don't change the mode.  */
STRIP_NOPS (exp);
5975 code = TREE_CODE (exp);
5977 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5978 if (code == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
return;
5982 /* First check the type of the overall operation. We need only look at
5983 unary, binary and relational operations. */
5984 if (TREE_CODE_CLASS (code) == '1'
5985 || TREE_CODE_CLASS (code) == '2'
5986 || TREE_CODE_CLASS (code) == '<')
5988 mode = TYPE_MODE (TREE_TYPE (exp));
5989 if (GET_MODE_CLASS (mode) == MODE_INT
5990 && mode > MAX_INTEGER_COMPUTATION_MODE)
5991 internal_error ("unsupported wide integer operation");
5994 /* Check operand of a unary op. */
5995 if (TREE_CODE_CLASS (code) == '1')
5997 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5998 if (GET_MODE_CLASS (mode) == MODE_INT
5999 && mode > MAX_INTEGER_COMPUTATION_MODE)
6000 internal_error ("unsupported wide integer operation");
6003 /* Check operands of a binary/comparison op. */
6004 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6006 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6007 if (GET_MODE_CLASS (mode) == MODE_INT
6008 && mode > MAX_INTEGER_COMPUTATION_MODE)
6009 internal_error ("unsupported wide integer operation");
6011 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6012 if (GET_MODE_CLASS (mode) == MODE_INT
6013 && mode > MAX_INTEGER_COMPUTATION_MODE)
internal_error ("unsupported wide integer operation");
}
}
#endif
6019 /* Return the highest power of two that EXP is known to be a multiple of.
6020 This is used in updating alignment of MEMs in array references. */
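/* Illustrative values (added commentary): 12 yields 4, since the
   lowest set bit of 12 (binary 1100) is 4; the product I * 8 with
   I unknown yields 8 * 1 == 8, so a memory reference whose offset
   is I * 8 may be assumed 8-byte aligned.  */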
6022 static HOST_WIDE_INT
highest_pow2_factor (exp)
tree exp;
{
6026 HOST_WIDE_INT c0, c1;
switch (TREE_CODE (exp))
{
case INTEGER_CST:
/* If the integer is expressible in a HOST_WIDE_INT, we can find
the lowest bit that's a one.  If the result is zero or negative,
pessimize by returning 1.  This is overly-conservative, but such
things should not happen in the offset expressions that we are
called with.  */
if (host_integerp (exp, 0))
{
c0 = tree_low_cst (exp, 0);
return c0 >= 0 ? c0 & -c0 : 1;
}
break;
6043 case PLUS_EXPR: case MINUS_EXPR:
6044 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6045 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6046 return MIN (c0, c1);
case MULT_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
return c0 * c1;
case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
6055 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6056 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6057 return MAX (1, c0 / c1);
6059 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6060 case COMPOUND_EXPR: case SAVE_EXPR:
6061 return highest_pow2_factor (TREE_OPERAND (exp, 0));
case COND_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6065 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
return MIN (c0, c1);

default:
break;
}

return 1;
}
6075 /* Return an object on the placeholder list that matches EXP, a
6076 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6077 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6078 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6079 is a location which initially points to a starting location in the
6080 placeholder list (zero means start of the list) and where a pointer into
6081 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
tree exp;
tree *plist;
{
6088 tree type = TREE_TYPE (exp);
6089 tree placeholder_expr;
6091 for (placeholder_expr
6092 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6093 placeholder_expr != 0;
6094 placeholder_expr = TREE_CHAIN (placeholder_expr))
tree need_type = TYPE_MAIN_VARIANT (type);
tree elt;
6099 /* Find the outermost reference that is of the type we want. If none,
see if any object has a type that is a pointer to the type we
want.  */
6102 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6103 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6104 || TREE_CODE (elt) == COND_EXPR)
6105 ? TREE_OPERAND (elt, 1)
6106 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6107 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6110 ? TREE_OPERAND (elt, 0) : 0))
6111 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
{
if (plist)
*plist = placeholder_expr;
return elt;
}
for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
elt
= ((TREE_CODE (elt) == COMPOUND_EXPR
6121 || TREE_CODE (elt) == COND_EXPR)
6122 ? TREE_OPERAND (elt, 1)
6123 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6124 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6125 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6126 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6127 ? TREE_OPERAND (elt, 0) : 0))
6128 if (POINTER_TYPE_P (TREE_TYPE (elt))
&& (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
== need_type))
{
if (plist)
*plist = placeholder_expr;
return build1 (INDIRECT_REF, need_type, elt);
}
}

return 0;
}
6141 /* expand_expr: generate code for computing expression EXP.
6142 An rtx for the computed value is returned. The value is never null.
6143 In the case of a void EXP, const0_rtx is returned.
6145 The value may be stored in TARGET if TARGET is nonzero.
6146 TARGET is just a suggestion; callers must assume that
6147 the rtx returned may not be the same as TARGET.
6149 If TARGET is CONST0_RTX, it means that the value will be ignored.
6151 If TMODE is not VOIDmode, it suggests generating the
6152 result in mode TMODE. But this is done only when convenient.
6153 Otherwise, TMODE is ignored and the value generated in its natural mode.
6154 TMODE is just a suggestion; callers must assume that
6155 the rtx returned may not have mode TMODE.
6157 Note that TARGET may have neither TMODE nor MODE. In that case, it
6158 probably will not be used.
6160 If MODIFIER is EXPAND_SUM then when EXP is an addition
6161 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6162 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6163 products as above, or REG or MEM, or constant.
6164 Ordinarily in such cases we would output mul or add instructions
6165 and then return a pseudo reg containing the sum.
6167 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6168 it also marks a label as absolutely required (it can't be dead).
6169 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6170 This is used for outputting expressions used in initializers.
6172 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6173 with a constant address even if that address is not normally legitimate.
6174 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
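/* Illustrative example (added, assumed 4-byte ints): under
   EXPAND_SUM, expanding the address of a[i] may return the
   un-reduced rtx
     (plus (mult (reg i') (const_int 4)) (symbol_ref "a"))
   instead of forcing the sum into a pseudo register, so the caller
   can fold it into an addressing mode.  */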
rtx
expand_expr (exp, target, tmode, modifier)
tree exp;
rtx target;
enum machine_mode tmode;
enum expand_modifier modifier;
{
rtx op0, op1, temp;
6184 tree type = TREE_TYPE (exp);
6185 int unsignedp = TREE_UNSIGNED (type);
6186 enum machine_mode mode;
6187 enum tree_code code = TREE_CODE (exp);
rtx subtarget, original_target;
int ignore;
6192 /* Used by check-memory-usage to make modifier read only. */
6193 enum expand_modifier ro_modifier;
6195 /* Handle ERROR_MARK before anybody tries to access its type. */
6196 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
{
op0 = CONST0_RTX (tmode);
if (op0 != 0)
return op0;
return const0_rtx;
}
6204 mode = TYPE_MODE (type);
6205 /* Use subtarget as the target for operand 0 of a binary operation. */
6206 subtarget = get_subtarget (target);
6207 original_target = target;
6208 ignore = (target == const0_rtx
6209 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6210 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6211 || code == COND_EXPR)
6212 && TREE_CODE (type) == VOID_TYPE));
6214 /* Make a read-only version of the modifier. */
6215 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6216 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6217 ro_modifier = modifier;
6219 ro_modifier = EXPAND_NORMAL;
6221 /* If we are going to ignore this result, we need only do something
6222 if there is a side-effect somewhere in the expression. If there
6223 is, short-circuit the most common cases here. Note that we must
6224 not call expand_expr with anything but const0_rtx in case this
6225 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
if (ignore)
{
if (! TREE_SIDE_EFFECTS (exp))
return const0_rtx;
6232 /* Ensure we reference a volatile object even if value is ignored, but
6233 don't do this if all we are doing is taking its address. */
6234 if (TREE_THIS_VOLATILE (exp)
6235 && TREE_CODE (exp) != FUNCTION_DECL
6236 && mode != VOIDmode && mode != BLKmode
6237 && modifier != EXPAND_CONST_ADDRESS)
6239 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6240 if (GET_CODE (temp) == MEM)
temp = copy_to_reg (temp);
return const0_rtx;
}
6245 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6246 || code == INDIRECT_REF || code == BUFFER_REF)
6247 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6248 VOIDmode, ro_modifier);
6249 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6250 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
ro_modifier);
expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
ro_modifier);
return const0_rtx;
}
6258 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6259 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
the first.  */
6262 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6263 VOIDmode, ro_modifier);
6264 else if (code == BIT_FIELD_REF)
6266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6268 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6270 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6278 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Only check stuff here if the mode we want is different from the mode
   of the expression; if it's the same, check_max_integer_computation_mode
   will handle it.  Do we really need to check this stuff at all?  */
6284 && GET_MODE (target) != mode
6285 && TREE_CODE (exp) != INTEGER_CST
6286 && TREE_CODE (exp) != PARM_DECL
6287 && TREE_CODE (exp) != ARRAY_REF
6288 && TREE_CODE (exp) != ARRAY_RANGE_REF
6289 && TREE_CODE (exp) != COMPONENT_REF
6290 && TREE_CODE (exp) != BIT_FIELD_REF
6291 && TREE_CODE (exp) != INDIRECT_REF
6292 && TREE_CODE (exp) != CALL_EXPR
6293 && TREE_CODE (exp) != VAR_DECL
6294 && TREE_CODE (exp) != RTL_EXPR)
6296 enum machine_mode mode = GET_MODE (target);
6298 if (GET_MODE_CLASS (mode) == MODE_INT
6299 && mode > MAX_INTEGER_COMPUTATION_MODE)
6300 internal_error ("unsupported wide integer operation");
6304 && TREE_CODE (exp) != INTEGER_CST
6305 && TREE_CODE (exp) != PARM_DECL
6306 && TREE_CODE (exp) != ARRAY_REF
6307 && TREE_CODE (exp) != ARRAY_RANGE_REF
6308 && TREE_CODE (exp) != COMPONENT_REF
6309 && TREE_CODE (exp) != BIT_FIELD_REF
6310 && TREE_CODE (exp) != INDIRECT_REF
6311 && TREE_CODE (exp) != VAR_DECL
6312 && TREE_CODE (exp) != CALL_EXPR
6313 && TREE_CODE (exp) != RTL_EXPR
6314 && GET_MODE_CLASS (tmode) == MODE_INT
6315 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6316 internal_error ("unsupported wide integer operation");
6318 check_max_integer_computation_mode (exp);
/* If we will do cse, generate all results into pseudo registers
   since 1) that allows cse to find more things
   and 2) otherwise cse could produce an insn the machine
   cannot support.  */
6326 if (! cse_not_expected && mode != BLKmode && target
6327 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6334 tree function = decl_function_context (exp);
6335 /* Handle using a label in a containing function. */
6336 if (function != current_function_decl
6337 && function != inline_function_decl && function != 0)
6339 struct function *p = find_function_data (function);
6340 p->expr->x_forced_labels
6341 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6342 p->expr->x_forced_labels);
6346 if (modifier == EXPAND_INITIALIZER)
6347 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6352 temp = gen_rtx_MEM (FUNCTION_MODE,
6353 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6354 if (function != current_function_decl
6355 && function != inline_function_decl && function != 0)
6356 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6361 if (DECL_RTL (exp) == 0)
6363 error_with_decl (exp, "prior parameter's size depends on `%s'");
6364 return CONST0_RTX (mode);
6367 /* ... fall through ... */
6370 /* If a static var's type was incomplete when the decl was written,
6371 but the type is complete now, lay out the decl now. */
6372 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6373 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6375 layout_decl (exp, 0);
6376 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6379 /* Although static-storage variables start off initialized, according to
6380 ANSI C, a memcpy could overwrite them with uninitialized values. So
6381 we check them too. This also lets us check for read-only variables
6382 accessed via a non-const declaration, in case it won't be detected
any other way (e.g., in an embedded system or OS kernel without
   memory protection).
6386 Aggregates are not checked here; they're handled elsewhere. */
6387 if (cfun && current_function_check_memory_usage
6389 && GET_CODE (DECL_RTL (exp)) == MEM
6390 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6392 enum memory_use_mode memory_usage;
6393 memory_usage = get_memory_usage_from_modifier (modifier);
6395 in_check_memory_usage = 1;
6396 if (memory_usage != MEMORY_USE_DONT)
6397 emit_library_call (chkr_check_addr_libfunc,
6398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6399 XEXP (DECL_RTL (exp), 0), Pmode,
6400 GEN_INT (int_size_in_bytes (type)),
6401 TYPE_MODE (sizetype),
6402 GEN_INT (memory_usage),
6403 TYPE_MODE (integer_type_node));
6404 in_check_memory_usage = 0;
6407 /* ... fall through ... */
6411 if (DECL_RTL (exp) == 0)
/* Ensure the variable is marked as used even if it doesn't go through
   a parser.  If it hasn't been used yet, write out an external
   definition.  */
6417 if (! TREE_USED (exp))
6419 assemble_external (exp);
6420 TREE_USED (exp) = 1;
6423 /* Show we haven't gotten RTL for this yet. */
6426 /* Handle variables inherited from containing functions. */
6427 context = decl_function_context (exp);
6429 /* We treat inline_function_decl as an alias for the current function
6430 because that is the inline function whose vars, types, etc.
6431 are being merged into the current function.
6432 See expand_inline_function. */
6434 if (context != 0 && context != current_function_decl
6435 && context != inline_function_decl
6436 /* If var is static, we don't need a static chain to access it. */
6437 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6438 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6442 /* Mark as non-local and addressable. */
6443 DECL_NONLOCAL (exp) = 1;
6444 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6446 mark_addressable (exp);
6447 if (GET_CODE (DECL_RTL (exp)) != MEM)
6449 addr = XEXP (DECL_RTL (exp), 0);
6450 if (GET_CODE (addr) == MEM)
6452 = replace_equiv_address (addr,
6453 fix_lexical_addr (XEXP (addr, 0), exp));
6455 addr = fix_lexical_addr (addr, exp);
6457 temp = replace_equiv_address (DECL_RTL (exp), addr);
6460 /* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
   See expand_decl.  */
6464 else if (GET_CODE (DECL_RTL (exp)) == MEM
6465 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6466 temp = validize_mem (DECL_RTL (exp));
6468 /* If DECL_RTL is memory, we are in the normal case and either
6469 the address is not valid or it is not a register and -fforce-addr
6470 is specified, get the address into a register. */
6472 else if (GET_CODE (DECL_RTL (exp)) == MEM
6473 && modifier != EXPAND_CONST_ADDRESS
6474 && modifier != EXPAND_SUM
6475 && modifier != EXPAND_INITIALIZER
6476 && (! memory_address_p (DECL_MODE (exp),
6477 XEXP (DECL_RTL (exp), 0))
6479 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6480 temp = replace_equiv_address (DECL_RTL (exp),
6481 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6483 /* If we got something, return it. But first, set the alignment
6484 if the address is a register. */
6487 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6488 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6493 /* If the mode of DECL_RTL does not match that of the decl, it
6494 must be a promoted value. We return a SUBREG of the wanted mode,
6495 but mark it so that we know that it was already extended. */
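/* Example (assuming a target whose PROMOTE_MODE widens QImode to
   SImode): a `char' variable is then held in, say, (reg:SI 60), and
   we return (subreg:QI (reg:SI 60) 0) with SUBREG_PROMOTED_VAR_P set
   so later code knows the extension has already been done.  */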
6497 if (GET_CODE (DECL_RTL (exp)) == REG
6498 && GET_MODE (DECL_RTL (exp)) != mode)
6500 /* Get the signedness used for this variable. Ensure we get the
6501 same mode we got when the variable was declared. */
6502 if (GET_MODE (DECL_RTL (exp))
6503 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6506 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6507 SUBREG_PROMOTED_VAR_P (temp) = 1;
6508 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 return DECL_RTL (exp);
6515 return immed_double_const (TREE_INT_CST_LOW (exp),
6516 TREE_INT_CST_HIGH (exp), mode);
6519 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6520 EXPAND_MEMORY_USE_BAD);
6523 /* If optimized, generate immediate CONST_DOUBLE
6524 which will be turned into memory by reload if necessary.
6526 We used to force a register so that loop.c could see it. But
6527 this does not allow gen_* patterns to perform optimizations with
6528 the constants. It also produces two insns in cases like "x = 1.0;".
6529 On most machines, floating-point constants are not permitted in
6530 many insns, so we'd end up copying it to a register in any case.
6532 Now, we do the copying in expand_binop, if appropriate. */
6533 return immed_real_const (exp);
6537 if (! TREE_CST_RTL (exp))
6538 output_constant_def (exp, 1);
6540 /* TREE_CST_RTL probably contains a constant address.
6541 On RISC machines where a constant address isn't valid,
6542 make some insns to get that address into a register. */
6543 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6544 && modifier != EXPAND_CONST_ADDRESS
6545 && modifier != EXPAND_INITIALIZER
6546 && modifier != EXPAND_SUM
6547 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6549 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6550 return replace_equiv_address (TREE_CST_RTL (exp),
6551 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6552 return TREE_CST_RTL (exp);
6554 case EXPR_WITH_FILE_LOCATION:
6557 const char *saved_input_filename = input_filename;
6558 int saved_lineno = lineno;
6559 input_filename = EXPR_WFL_FILENAME (exp);
6560 lineno = EXPR_WFL_LINENO (exp);
6561 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6562 emit_line_note (input_filename, lineno);
6563 /* Possibly avoid switching back and forth here. */
6564 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6565 input_filename = saved_input_filename;
6566 lineno = saved_lineno;
6571 context = decl_function_context (exp);
6573 /* If this SAVE_EXPR was at global context, assume we are an
6574 initialization function and move it into our context. */
6576 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6578 /* We treat inline_function_decl as an alias for the current function
6579 because that is the inline function whose vars, types, etc.
6580 are being merged into the current function.
6581 See expand_inline_function. */
6582 if (context == current_function_decl || context == inline_function_decl)
6585 /* If this is non-local, handle it. */
6588 /* The following call just exists to abort if the context is
6589 not of a containing function. */
6590 find_function_data (context);
6592 temp = SAVE_EXPR_RTL (exp);
6593 if (temp && GET_CODE (temp) == REG)
6595 put_var_into_stack (exp);
6596 temp = SAVE_EXPR_RTL (exp);
6598 if (temp == 0 || GET_CODE (temp) != MEM)
6601 replace_equiv_address (temp,
6602 fix_lexical_addr (XEXP (temp, 0), exp));
6604 if (SAVE_EXPR_RTL (exp) == 0)
6606 if (mode == VOIDmode)
6609 temp = assign_temp (build_qualified_type (type,
6611 | TYPE_QUAL_CONST)),
6614 SAVE_EXPR_RTL (exp) = temp;
6615 if (!optimize && GET_CODE (temp) == REG)
6616 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6619 /* If the mode of TEMP does not match that of the expression, it
6620 must be a promoted value. We pass store_expr a SUBREG of the
6621 wanted mode but mark it so that we know that it was already
extended.  Note that `unsignedp' was modified above in this case.  */
6625 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6627 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6628 SUBREG_PROMOTED_VAR_P (temp) = 1;
6629 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6632 if (temp == const0_rtx)
6633 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634 EXPAND_MEMORY_USE_BAD);
6636 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6638 TREE_USED (exp) = 1;
6641 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6642 must be a promoted value. We return a SUBREG of the wanted mode,
6643 but mark it so that we know that it was already extended. */
6645 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6646 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6648 /* Compute the signedness and make the proper SUBREG. */
6649 promote_mode (type, mode, &unsignedp, 0);
6650 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6651 SUBREG_PROMOTED_VAR_P (temp) = 1;
6652 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6656 return SAVE_EXPR_RTL (exp);
6661 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6662 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6666 case PLACEHOLDER_EXPR:
6668 tree old_list = placeholder_list;
6669 tree placeholder_expr = 0;
6671 exp = find_placeholder (exp, &placeholder_expr);
6675 placeholder_list = TREE_CHAIN (placeholder_expr);
6676 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6677 placeholder_list = old_list;
6681 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6684 case WITH_RECORD_EXPR:
6685 /* Put the object on the placeholder list, expand our first operand,
6686 and pop the list. */
6687 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6689 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6690 tmode, ro_modifier);
6691 placeholder_list = TREE_CHAIN (placeholder_list);
6695 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6696 expand_goto (TREE_OPERAND (exp, 0));
6698 expand_computed_goto (TREE_OPERAND (exp, 0));
6702 expand_exit_loop_if_false (NULL,
6703 invert_truthvalue (TREE_OPERAND (exp, 0)));
6706 case LABELED_BLOCK_EXPR:
6707 if (LABELED_BLOCK_BODY (exp))
6708 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6709 /* Should perhaps use expand_label, but this is simpler and safer. */
6710 do_pending_stack_adjust ();
6711 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6714 case EXIT_BLOCK_EXPR:
6715 if (EXIT_BLOCK_RETURN (exp))
6716 sorry ("returned value in block_exit_expr");
6717 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6722 expand_start_loop (1);
6723 expand_expr_stmt (TREE_OPERAND (exp, 0));
6731 tree vars = TREE_OPERAND (exp, 0);
6732 int vars_need_expansion = 0;
6734 /* Need to open a binding contour here because
6735 if there are any cleanups they must be contained here. */
6736 expand_start_bindings (2);
6738 /* Mark the corresponding BLOCK for output in its proper place. */
6739 if (TREE_OPERAND (exp, 2) != 0
6740 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6741 insert_block (TREE_OPERAND (exp, 2));
6743 /* If VARS have not yet been expanded, expand them now. */
6746 if (!DECL_RTL_SET_P (vars))
6748 vars_need_expansion = 1;
6751 expand_decl_init (vars);
6752 vars = TREE_CHAIN (vars);
6755 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6757 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6763 if (RTL_EXPR_SEQUENCE (exp))
6765 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6767 emit_insns (RTL_EXPR_SEQUENCE (exp));
6768 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6770 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6771 free_temps_for_rtl_expr (exp);
6772 return RTL_EXPR_RTL (exp);
/* If we don't need the result, just ensure we evaluate any
   subexpressions.  */
6780 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6781 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6782 EXPAND_MEMORY_USE_BAD);
6786 /* All elts simple constants => refer to a constant in memory. But
6787 if this is a non-BLKmode mode, let it store a field at a time
6788 since that should make a CONST_INT or CONST_DOUBLE when we
6789 fold. Likewise, if we have a target we can use, it is best to
6790 store directly into the target unless the type is large enough
6791 that memcpy will be used. If we are making an initializer and
6792 all operands are constant, put it in memory as well. */
6793 else if ((TREE_STATIC (exp)
6794 && ((mode == BLKmode
6795 && ! (target != 0 && safe_from_p (target, exp, 1)))
6796 || TREE_ADDRESSABLE (exp)
6797 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6798 && (! MOVE_BY_PIECES_P
6799 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6801 && ! mostly_zeros_p (exp))))
6802 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6804 rtx constructor = output_constant_def (exp, 1);
6806 if (modifier != EXPAND_CONST_ADDRESS
6807 && modifier != EXPAND_INITIALIZER
6808 && modifier != EXPAND_SUM)
6809 constructor = validize_mem (constructor);
6815 /* Handle calls that pass values in multiple non-contiguous
6816 locations. The Irix 6 ABI has examples of this. */
6817 if (target == 0 || ! safe_from_p (target, exp, 1)
6818 || GET_CODE (target) == PARALLEL)
6820 = assign_temp (build_qualified_type (type,
6822 | (TREE_READONLY (exp)
6823 * TYPE_QUAL_CONST))),
6824 TREE_ADDRESSABLE (exp), 1, 1);
6826 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6827 int_size_in_bytes (TREE_TYPE (exp)));
6833 tree exp1 = TREE_OPERAND (exp, 0);
6835 tree string = string_constant (exp1, &index);
6837 /* Try to optimize reads from const strings. */
6839 && TREE_CODE (string) == STRING_CST
6840 && TREE_CODE (index) == INTEGER_CST
6841 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6842 && GET_MODE_CLASS (mode) == MODE_INT
6843 && GET_MODE_SIZE (mode) == 1
6844 && modifier != EXPAND_MEMORY_USE_WO)
6846 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6848 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6849 op0 = memory_address (mode, op0);
6851 if (cfun && current_function_check_memory_usage
6852 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6854 enum memory_use_mode memory_usage;
6855 memory_usage = get_memory_usage_from_modifier (modifier);
6857 if (memory_usage != MEMORY_USE_DONT)
6859 in_check_memory_usage = 1;
6860 emit_library_call (chkr_check_addr_libfunc,
6861 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6862 Pmode, GEN_INT (int_size_in_bytes (type)),
6863 TYPE_MODE (sizetype),
6864 GEN_INT (memory_usage),
6865 TYPE_MODE (integer_type_node));
6866 in_check_memory_usage = 0;
6870 temp = gen_rtx_MEM (mode, op0);
6871 set_mem_attributes (temp, exp, 0);
6873 /* If we are writing to this object and its type is a record with
6874 readonly fields, we must mark it as readonly so it will
6875 conflict with readonly references to those fields. */
6876 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6877 RTX_UNCHANGING_P (temp) = 1;
6883 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6887 tree array = TREE_OPERAND (exp, 0);
6888 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6889 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6890 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6893 /* Optimize the special-case of a zero lower bound.
6895 We convert the low_bound to sizetype to avoid some problems
6896 with constant folding. (E.g. suppose the lower bound is 1,
6897 and its mode is QI. Without the conversion, (ARRAY
6898 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6899 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6901 if (! integer_zerop (low_bound))
6902 index = size_diffop (index, convert (sizetype, low_bound));
6904 /* Fold an expression like: "foo"[2].
6905 This is not done in fold so it won't happen inside &.
6906 Don't fold if this is for wide characters since it's too
6907 difficult to do correctly and this is a very rare case. */
6909 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6910 && TREE_CODE (array) == STRING_CST
6911 && TREE_CODE (index) == INTEGER_CST
6912 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6913 && GET_MODE_CLASS (mode) == MODE_INT
6914 && GET_MODE_SIZE (mode) == 1)
6916 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6918 /* If this is a constant index into a constant array,
6919 just get the value from the array. Handle both the cases when
6920 we have an explicit constructor and when our operand is a variable
6921 that was declared const. */
6923 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6924 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6925 && TREE_CODE (index) == INTEGER_CST
6926 && 0 > compare_tree_int (index,
6927 list_length (CONSTRUCTOR_ELTS
6928 (TREE_OPERAND (exp, 0)))))
6932 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6933 i = TREE_INT_CST_LOW (index);
6934 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6938 return expand_expr (fold (TREE_VALUE (elem)), target,
6939 tmode, ro_modifier);
6942 else if (optimize >= 1
6943 && modifier != EXPAND_CONST_ADDRESS
6944 && modifier != EXPAND_INITIALIZER
6945 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6946 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6947 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6949 if (TREE_CODE (index) == INTEGER_CST)
6951 tree init = DECL_INITIAL (array);
6953 if (TREE_CODE (init) == CONSTRUCTOR)
6957 for (elem = CONSTRUCTOR_ELTS (init);
6959 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6960 elem = TREE_CHAIN (elem))
6963 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6964 return expand_expr (fold (TREE_VALUE (elem)), target,
6965 tmode, ro_modifier);
6967 else if (TREE_CODE (init) == STRING_CST
6968 && 0 > compare_tree_int (index,
6969 TREE_STRING_LENGTH (init)))
6971 tree type = TREE_TYPE (TREE_TYPE (init));
6972 enum machine_mode mode = TYPE_MODE (type);
6974 if (GET_MODE_CLASS (mode) == MODE_INT
6975 && GET_MODE_SIZE (mode) == 1)
6977 (TREE_STRING_POINTER
6978 (init)[TREE_INT_CST_LOW (index)]));
6987 case ARRAY_RANGE_REF:
6988 /* If the operand is a CONSTRUCTOR, we can just extract the
6989 appropriate field if it is present. Don't do this if we have
6990 already written the data since we want to refer to that copy
6991 and varasm.c assumes that's what we'll do. */
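/* A sketch of the bitfield handling below: an unsigned 3-bit field
   whose CONSTRUCTOR value is 29 must read back as 29 & 7 == 5, so we
   AND with (1 << 3) - 1; a signed field is instead sign extended by
   shifting left, then right, by GET_MODE_BITSIZE minus the field
   width.  (Hypothetical field width, for illustration only.)  */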
6992 if (code == COMPONENT_REF
6993 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6994 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6998 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6999 elt = TREE_CHAIN (elt))
7000 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7001 /* We can normally use the value of the field in the
7002 CONSTRUCTOR. However, if this is a bitfield in
7003 an integral mode that we can fit in a HOST_WIDE_INT,
7004 we must mask only the number of bits in the bitfield,
7005 since this is done implicitly by the constructor. If
7006 the bitfield does not meet either of those conditions,
7007 we can't do this optimization. */
7008 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7009 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7011 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7012 <= HOST_BITS_PER_WIDE_INT))))
7014 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7015 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7017 HOST_WIDE_INT bitsize
7018 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7020 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7022 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7023 op0 = expand_and (op0, op1, target);
7027 enum machine_mode imode
7028 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7030 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7033 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7035 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7045 enum machine_mode mode1;
7046 HOST_WIDE_INT bitsize, bitpos;
7049 unsigned int alignment;
7050 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7051 &mode1, &unsignedp, &volatilep,
7055 /* If we got back the original object, something is wrong. Perhaps
7056 we are evaluating an expression too early. In any event, don't
7057 infinitely recurse. */
7061 /* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary, and TARGET is known
   to be adequate for that.  This occurs in unchecked conversion in Ada.  */
7067 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7068 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7070 ? target : NULL_RTX),
7072 (modifier == EXPAND_INITIALIZER
7073 || modifier == EXPAND_CONST_ADDRESS)
7074 ? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
   legitimate constant and OFFSET is 0; put it into memory otherwise.  */
7078 if (CONSTANT_P (op0))
7080 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7081 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7083 op0 = force_reg (mode, op0);
7085 op0 = validize_mem (force_const_mem (mode, op0));
7090 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7092 /* If this object is in a register, put it into memory.
7093 This case can't occur in C, but can in Ada if we have
7094 unchecked conversion of an expression from a scalar type to
7095 an array or record type. */
7096 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7097 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7099 /* If the operand is a SAVE_EXPR, we can deal with this by
7100 forcing the SAVE_EXPR into memory. */
7101 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7103 put_var_into_stack (TREE_OPERAND (exp, 0));
7104 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7109 = build_qualified_type (TREE_TYPE (tem),
7110 (TYPE_QUALS (TREE_TYPE (tem))
7111 | TYPE_QUAL_CONST));
7112 rtx memloc = assign_temp (nt, 1, 1, 1);
7114 mark_temp_addr_taken (memloc);
7115 emit_move_insn (memloc, op0);
7120 if (GET_CODE (op0) != MEM)
7123 if (GET_MODE (offset_rtx) != ptr_mode)
7124 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7126 #ifdef POINTERS_EXTEND_UNSIGNED
7127 if (GET_MODE (offset_rtx) != Pmode)
7128 offset_rtx = convert_memory_address (Pmode, offset_rtx);
/* A constant address in OP0 can have VOIDmode; we must not try
   to call force_reg on it in that case.  */
7133 if (GET_CODE (op0) == MEM
7134 && GET_MODE (op0) == BLKmode
7135 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7137 && (bitpos % bitsize) == 0
7138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7139 && alignment == GET_MODE_ALIGNMENT (mode1))
7141 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7143 if (GET_CODE (XEXP (temp, 0)) == REG)
7146 op0 = (replace_equiv_address
7148 force_reg (GET_MODE (XEXP (temp, 0)),
7153 op0 = offset_address (op0, offset_rtx,
7154 highest_pow2_factor (offset));
7157 /* Don't forget about volatility even if this is a bitfield. */
7158 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7160 if (op0 == orig_op0)
7161 op0 = copy_rtx (op0);
7163 MEM_VOLATILE_P (op0) = 1;
7166 /* Check the access. */
7167 if (cfun != 0 && current_function_check_memory_usage
7168 && GET_CODE (op0) == MEM)
7170 enum memory_use_mode memory_usage;
7171 memory_usage = get_memory_usage_from_modifier (modifier);
7173 if (memory_usage != MEMORY_USE_DONT)
7178 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7179 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7181 /* Check the access right of the pointer. */
7182 in_check_memory_usage = 1;
7183 if (size > BITS_PER_UNIT)
7184 emit_library_call (chkr_check_addr_libfunc,
7185 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7186 Pmode, GEN_INT (size / BITS_PER_UNIT),
7187 TYPE_MODE (sizetype),
7188 GEN_INT (memory_usage),
7189 TYPE_MODE (integer_type_node));
7190 in_check_memory_usage = 0;
7194 /* In cases where an aligned union has an unaligned object
7195 as a field, we might be extracting a BLKmode value from
7196 an integer-mode (e.g., SImode) object. Handle this case
7197 by doing the extract into an object as wide as the field
7198 (which we know to be the width of a basic mode), then
7199 storing into memory, and changing the mode to BLKmode. */
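/* Concretely (a sketch): a union whose mode is SImode may contain a
   BLKmode field; we pull the bits out in SImode, spill them to a
   stack temporary, and return that temporary with BLKmode.  */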
7200 if (mode1 == VOIDmode
7201 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7202 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7203 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7204 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7205 && modifier != EXPAND_CONST_ADDRESS
7206 && modifier != EXPAND_INITIALIZER)
7207 /* If the field isn't aligned enough to fetch as a memref,
7208 fetch it as a bit field. */
7209 || (mode1 != BLKmode
7210 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7211 && ((TYPE_ALIGN (TREE_TYPE (tem))
7212 < GET_MODE_ALIGNMENT (mode))
7213 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7214 /* If the type and the field are a constant size and the
7215 size of the type isn't the same size as the bitfield,
7216 we must use bitfield operations. */
7218 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7220 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7223 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7224 && (TYPE_ALIGN (type) > alignment
7225 || bitpos % TYPE_ALIGN (type) != 0)))
7227 enum machine_mode ext_mode = mode;
7229 if (ext_mode == BLKmode
7230 && ! (target != 0 && GET_CODE (op0) == MEM
7231 && GET_CODE (target) == MEM
7232 && bitpos % BITS_PER_UNIT == 0))
7233 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7235 if (ext_mode == BLKmode)
7237 /* In this case, BITPOS must start at a byte boundary and
7238 TARGET, if specified, must be a MEM. */
7239 if (GET_CODE (op0) != MEM
7240 || (target != 0 && GET_CODE (target) != MEM)
7241 || bitpos % BITS_PER_UNIT != 0)
7244 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7246 target = assign_temp (type, 0, 1, 1);
7248 emit_block_move (target, op0,
7249 bitsize == -1 ? expr_size (exp)
7250 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7256 op0 = validize_mem (op0);
7258 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7259 mark_reg_pointer (XEXP (op0, 0), alignment);
7261 op0 = extract_bit_field (op0, bitsize, bitpos,
7262 unsignedp, target, ext_mode, ext_mode,
7264 int_size_in_bytes (TREE_TYPE (tem)));
7266 /* If the result is a record type and BITSIZE is narrower than
7267 the mode of OP0, an integral mode, and this is a big endian
7268 machine, we must put the field into the high-order bits. */
7269 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7270 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7271 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7272 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7273 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7277 if (mode == BLKmode)
7279 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7281 rtx new = assign_temp (nt, 0, 1, 1);
7283 emit_move_insn (new, op0);
7284 op0 = copy_rtx (new);
7285 PUT_MODE (op0, BLKmode);
/* If the result is BLKmode, use that to access the object
   now as well.  */
7293 if (mode == BLKmode)
7296 /* Get a reference to just this component. */
7297 if (modifier == EXPAND_CONST_ADDRESS
7298 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7299 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7301 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7303 if (op0 == orig_op0)
7304 op0 = copy_rtx (op0);
7306 set_mem_attributes (op0, exp, 0);
7307 if (GET_CODE (XEXP (op0, 0)) == REG)
7308 mark_reg_pointer (XEXP (op0, 0), alignment);
7310 MEM_VOLATILE_P (op0) |= volatilep;
7311 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7312 || modifier == EXPAND_CONST_ADDRESS
7313 || modifier == EXPAND_INITIALIZER)
7315 else if (target == 0)
7316 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7318 convert_move (target, op0, unsignedp);
7324 rtx insn, before = get_last_insn (), vtbl_ref;
7326 /* Evaluate the interior expression. */
7327 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7330 /* Get or create an instruction off which to hang a note. */
7331 if (REG_P (subtarget))
7334 insn = get_last_insn ();
7337 if (! INSN_P (insn))
7338 insn = prev_nonnote_insn (insn);
7342 target = gen_reg_rtx (GET_MODE (subtarget));
7343 insn = emit_move_insn (target, subtarget);
7346 /* Collect the data for the note. */
7347 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7348 vtbl_ref = plus_constant (vtbl_ref,
7349 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7350 /* Discard the initial CONST that was added. */
7351 vtbl_ref = XEXP (vtbl_ref, 0);
7354 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7359 /* Intended for a reference to a buffer of a file-object in Pascal.
7360 But it's not certain that a special tree code will really be
7361 necessary for these. INDIRECT_REF might work for them. */
/* Pascal set IN expression.

   Algorithm:
       rlo       = set_low - (set_low%bits_per_word);
       the_word  = set [ (index - rlo)/bits_per_word ];
       bit_index = index % bits_per_word;
       bitmask   = 1 << bit_index;
       return !!(the_word & bitmask);  */
7376 tree set = TREE_OPERAND (exp, 0);
7377 tree index = TREE_OPERAND (exp, 1);
7378 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7379 tree set_type = TREE_TYPE (set);
7380 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7381 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7382 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7383 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7384 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7385 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7386 rtx setaddr = XEXP (setval, 0);
7387 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7389 rtx diff, quo, rem, addr, bit, result;
7391 /* If domain is empty, answer is no. Likewise if index is constant
7392 and out of bounds. */
7393 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7394 && TREE_CODE (set_low_bound) == INTEGER_CST
7395 && tree_int_cst_lt (set_high_bound, set_low_bound))
7396 || (TREE_CODE (index) == INTEGER_CST
7397 && TREE_CODE (set_low_bound) == INTEGER_CST
7398 && tree_int_cst_lt (index, set_low_bound))
7399 || (TREE_CODE (set_high_bound) == INTEGER_CST
7400 && TREE_CODE (index) == INTEGER_CST
7401 && tree_int_cst_lt (set_high_bound, index))))
7405 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7407 /* If we get here, we have to generate the code for both cases
7408 (in range and out of range). */
7410 op0 = gen_label_rtx ();
7411 op1 = gen_label_rtx ();
7413 if (! (GET_CODE (index_val) == CONST_INT
7414 && GET_CODE (lo_r) == CONST_INT))
7416 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7417 GET_MODE (index_val), iunsignedp, 0, op1);
7420 if (! (GET_CODE (index_val) == CONST_INT
7421 && GET_CODE (hi_r) == CONST_INT))
7423 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7424 GET_MODE (index_val), iunsignedp, 0, op1);
/* Calculate the element number of bit zero in the first word
   of the set.  */
7429 if (GET_CODE (lo_r) == CONST_INT)
7430 rlow = GEN_INT (INTVAL (lo_r)
7431 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7433 rlow = expand_binop (index_mode, and_optab, lo_r,
7434 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7435 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7437 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7438 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7440 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7441 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7442 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7443 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7445 addr = memory_address (byte_mode,
7446 expand_binop (index_mode, add_optab, diff,
7447 setaddr, NULL_RTX, iunsignedp,
7450 /* Extract the bit we want to examine. */
7451 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7452 gen_rtx_MEM (byte_mode, addr),
7453 make_tree (TREE_TYPE (index), rem),
7455 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7456 GET_MODE (target) == byte_mode ? target : 0,
7457 1, OPTAB_LIB_WIDEN);
7459 if (result != target)
7460 convert_move (target, result, 1);
7462 /* Output the code to handle the out-of-range case. */
7465 emit_move_insn (target, const0_rtx);
7470 case WITH_CLEANUP_EXPR:
7471 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7473 WITH_CLEANUP_EXPR_RTL (exp)
7474 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7475 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7477 /* That's it for this cleanup. */
7478 TREE_OPERAND (exp, 1) = 0;
7480 return WITH_CLEANUP_EXPR_RTL (exp);
7482 case CLEANUP_POINT_EXPR:
7484 /* Start a new binding layer that will keep track of all cleanup
7485 actions to be performed. */
7486 expand_start_bindings (2);
7488 target_temp_slot_level = temp_slot_level;
7490 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7491 /* If we're going to use this value, load it up now. */
7493 op0 = force_not_mem (op0);
7494 preserve_temp_slots (op0);
7495 expand_end_bindings (NULL_TREE, 0, 0);
7500 /* Check for a built-in function. */
7501 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7502 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7504 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7506 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7507 == BUILT_IN_FRONTEND)
7508 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7510 return expand_builtin (exp, target, subtarget, tmode, ignore);
7513 return expand_call (exp, target, ignore);
7515 case NON_LVALUE_EXPR:
7518 case REFERENCE_EXPR:
7519 if (TREE_OPERAND (exp, 0) == error_mark_node)
7522 if (TREE_CODE (type) == UNION_TYPE)
7524 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7526 /* If both input and output are BLKmode, this conversion
7527 isn't actually doing anything unless we need to make the
7528 alignment stricter. */
7529 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7530 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7531 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7532 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7536 target = assign_temp (type, 0, 1, 1);
7538 if (GET_CODE (target) == MEM)
7539 /* Store data into beginning of memory target. */
7540 store_expr (TREE_OPERAND (exp, 0),
7541 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7543 else if (GET_CODE (target) == REG)
7544 /* Store this field into a union of the proper type. */
7545 store_field (target,
7546 MIN ((int_size_in_bytes (TREE_TYPE
7547 (TREE_OPERAND (exp, 0)))
7549 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7550 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7551 VOIDmode, 0, BITS_PER_UNIT,
7552 int_size_in_bytes (type), 0);
7556 /* Return the entire union. */
7560 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7565 /* If the signedness of the conversion differs and OP0 is
7566 a promoted SUBREG, clear that indication since we now
7567 have to do the proper extension. */
7568 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7569 && GET_CODE (op0) == SUBREG)
7570 SUBREG_PROMOTED_VAR_P (op0) = 0;
7575 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7576 if (GET_MODE (op0) == mode)
7579 /* If OP0 is a constant, just convert it into the proper mode. */
7580 if (CONSTANT_P (op0))
7582 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7583 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7585 if (modifier == EXPAND_INITIALIZER)
7586 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7590 convert_to_mode (mode, op0,
7591 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7593 convert_move (target, op0,
7594 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
/* We come here from MINUS_EXPR when the second operand is a
   constant.  */
7601 this_optab = ! unsignedp && flag_trapv
7602 && (GET_MODE_CLASS(mode) == MODE_INT)
7603 ? addv_optab : add_optab;
7605 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7606 something else, make sure we add the register to the constant and
7607 then to the other thing. This case can occur during strength
7608 reduction and doing it this way will produce better code if the
7609 frame pointer or argument pointer is eliminated.
7611 fold-const.c will ensure that the constant is always in the inner
7612 PLUS_EXPR, so the only case we need to do anything about is if
7613 sp, ap, or fp is our second argument, in which case we must swap
7614 the innermost first argument and our second argument. */
7616 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7617 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7618 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7619 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7620 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7621 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7623 tree t = TREE_OPERAND (exp, 1);
7625 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7626 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7629 /* If the result is to be ptr_mode and we are adding an integer to
7630 something, we might be forming a constant. So try to use
7631 plus_constant. If it produces a sum and we can't accept it,
7632 use force_operand. This allows P = &ARR[const] to generate
efficient code on machines where a SYMBOL_REF is not a valid
   address.
7636 If this is an EXPAND_SUM call, always return the sum. */
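/* E.g., `p = &arr[10]' for a static `int arr[]' can fold through
   plus_constant to (const (plus (symbol_ref "arr") (const_int 40))),
   and force_operand loads that into a register only if the target
   cannot use it as an address directly.  (A sketch; the offset 40
   assumes 4-byte elements.)  */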
7637 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7638 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7640 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7641 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7642 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7646 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7648 /* Use immed_double_const to ensure that the constant is
7649 truncated according to the mode of OP1, then sign extended
7650 to a HOST_WIDE_INT. Using the constant directly can result
7651 in non-canonical RTL in a 64x32 cross compile. */
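/* (E.g., on a 64-bit host compiling for a 32-bit target, the SImode
   constant with all bits set must become the canonical (const_int -1),
   not (const_int 0xffffffff).)  */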
7653 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7655 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7656 op1 = plus_constant (op1, INTVAL (constant_part));
7657 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7658 op1 = force_operand (op1, target);
7662 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7663 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7664 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7668 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7670 if (! CONSTANT_P (op0))
7672 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7673 VOIDmode, modifier);
7674 /* Don't go to both_summands if modifier
7675 says it's not right to return a PLUS. */
7676 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7680 /* Use immed_double_const to ensure that the constant is
7681 truncated according to the mode of OP1, then sign extended
7682 to a HOST_WIDE_INT. Using the constant directly can result
7683 in non-canonical RTL in a 64x32 cross compile. */
7685 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7687 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7688 op0 = plus_constant (op0, INTVAL (constant_part));
7689 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7690 op0 = force_operand (op0, target);
7695 /* No sense saving up arithmetic to be done
7696 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
   zero-extend.  */
7699 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7700 || mode != ptr_mode)
7703 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7706 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7707 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7710 /* Make sure any term that's a sum with a constant comes last. */
7711 if (GET_CODE (op0) == PLUS
7712 && CONSTANT_P (XEXP (op0, 1)))
7718 /* If adding to a sum including a constant,
7719 associate it to put the constant outside. */
7720 if (GET_CODE (op1) == PLUS
7721 && CONSTANT_P (XEXP (op1, 1)))
7723 rtx constant_term = const0_rtx;
7725 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7728 /* Ensure that MULT comes first if there is one. */
7729 else if (GET_CODE (op0) == MULT)
7730 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7732 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7734 /* Let's also eliminate constants from op0 if possible. */
7735 op0 = eliminate_constant_term (op0, &constant_term);
7737 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7738 their sum should be a constant. Form it into OP1, since the
7739 result we want will then be OP0 + OP1. */
7741 temp = simplify_binary_operation (PLUS, mode, constant_term,
7746 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7749 /* Put a constant term last and put a multiplication first. */
7750 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7751 temp = op1, op1 = op0, op0 = temp;
7753 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7754 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7757 /* For initializers, we are allowed to return a MINUS of two
symbolic constants.  Here we handle all cases when both operands
   are constant.  */
7760 /* Handle difference of two symbolic constants,
7761 for the sake of an initializer. */
7762 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7763 && really_constant_p (TREE_OPERAND (exp, 0))
7764 && really_constant_p (TREE_OPERAND (exp, 1)))
7766 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7767 VOIDmode, ro_modifier);
7768 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7769 VOIDmode, ro_modifier);
7771 /* If the last operand is a CONST_INT, use plus_constant of
7772 the negated constant. Else make the MINUS. */
7773 if (GET_CODE (op1) == CONST_INT)
7774 return plus_constant (op0, - INTVAL (op1));
7776 return gen_rtx_MINUS (mode, op0, op1);
7778 /* Convert A - const to A + (-const). */
7779 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7781 tree negated = fold (build1 (NEGATE_EXPR, type,
7782 TREE_OPERAND (exp, 1)));
7784 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7785 /* If we can't negate the constant in TYPE, leave it alone and
7786 expand_binop will negate it for us. We used to try to do it
7787 here in the signed version of TYPE, but that doesn't work
7788 on POINTER_TYPEs. */;
7791 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7795 this_optab = ! unsignedp && flag_trapv
7796 && (GET_MODE_CLASS(mode) == MODE_INT)
7797 ? subv_optab : sub_optab;
7801 /* If first operand is constant, swap them.
7802 Thus the following special case checks need only
7803 check the second operand. */
7804 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7806 tree t1 = TREE_OPERAND (exp, 0);
7807 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7808 TREE_OPERAND (exp, 1) = t1;
7811 /* Attempt to return something suitable for generating an
7812 indexed address, for machines that support that. */
7814 if (modifier == EXPAND_SUM && mode == ptr_mode
7815 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7816 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7821 /* Apply distributive law if OP0 is x+c. */
7822 if (GET_CODE (op0) == PLUS
7823 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7828 (mode, XEXP (op0, 0),
7829 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7830 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7831 * INTVAL (XEXP (op0, 1))));
7833 if (GET_CODE (op0) != REG)
7834 op0 = force_operand (op0, NULL_RTX);
7835 if (GET_CODE (op0) != REG)
7836 op0 = copy_to_mode_reg (mode, op0);
7839 gen_rtx_MULT (mode, op0,
7840 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7843 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7846 /* Check for multiplying things that have been extended
7847 from a narrower type. If this machine supports multiplying
7848 in that narrower type with a result in the desired type,
7849 do it that way, and avoid the explicit type-conversion. */
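/* Sketch: for `(int) h1 * (int) h2' with h1, h2 of type short, a
   target providing the conventional mulhisi3 pattern can multiply
   the HImode operands into an SImode product directly, with no
   explicit extension insns.  (Pattern availability is
   target-dependent; h1/h2 are hypothetical names.)  */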
7850 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7851 && TREE_CODE (type) == INTEGER_TYPE
7852 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7853 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7854 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7855 && int_fits_type_p (TREE_OPERAND (exp, 1),
7856 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7857 /* Don't use a widening multiply if a shift will do. */
7858 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7859 > HOST_BITS_PER_WIDE_INT)
7860 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7862 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7863 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7865 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7866 /* If both operands are extended, they must either both
7867 be zero-extended or both be sign-extended. */
7868 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7870 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7872 enum machine_mode innermode
7873 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7874 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7875 ? smul_widen_optab : umul_widen_optab);
7876 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7877 ? umul_widen_optab : smul_widen_optab);
7878 if (mode == GET_MODE_WIDER_MODE (innermode))
7880 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7882 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7883 NULL_RTX, VOIDmode, 0);
7884 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7885 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7888 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7889 NULL_RTX, VOIDmode, 0);
7892 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7893 && innermode == word_mode)
7896 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7897 NULL_RTX, VOIDmode, 0);
7898 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7899 op1 = convert_modes (innermode, mode,
7900 expand_expr (TREE_OPERAND (exp, 1),
7901 NULL_RTX, VOIDmode, 0),
7904 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7905 NULL_RTX, VOIDmode, 0);
7906 temp = expand_binop (mode, other_optab, op0, op1, target,
7907 unsignedp, OPTAB_LIB_WIDEN);
7908 htem = expand_mult_highpart_adjust (innermode,
7909 gen_highpart (innermode, temp),
7911 gen_highpart (innermode, temp),
7913 emit_move_insn (gen_highpart (innermode, temp), htem);
7918 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7920 return expand_mult (mode, op0, op1, target, unsignedp);
7922 case TRUNC_DIV_EXPR:
7923 case FLOOR_DIV_EXPR:
7925 case ROUND_DIV_EXPR:
7926 case EXACT_DIV_EXPR:
7927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
/* Possible optimization: compute the dividend with EXPAND_SUM;
   then, if the divisor is constant, we can optimize the case
   where some terms of the dividend have coefficients divisible by it.  */
7932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7933 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7934 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
/* Emit a/b as a*(1/b).  Later, CSE may be able to share the
   reciprocal, saving an expensive divide.  If not, combine will
   rebuild the original computation.  */
7940 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7941 && !real_onep (TREE_OPERAND (exp, 0)))
7942 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7943 build (RDIV_EXPR, type,
7944 build_real (type, dconst1),
7945 TREE_OPERAND (exp, 1))),
7946 target, tmode, unsignedp);
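/* E.g., given `x/y' and `z/y' in one block, the rewrite above exposes
   a common `1/y' subexpression: t = 1.0/y; x*t; z*t.  (A sketch;
   whether the reciprocal is actually shared depends on later passes.)  */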
7947 this_optab = sdiv_optab;
7950 case TRUNC_MOD_EXPR:
7951 case FLOOR_MOD_EXPR:
7953 case ROUND_MOD_EXPR:
7954 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7956 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7957 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7958 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7960 case FIX_ROUND_EXPR:
7961 case FIX_FLOOR_EXPR:
7963 abort (); /* Not used for C. */
7965 case FIX_TRUNC_EXPR:
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7968 target = gen_reg_rtx (mode);
7969 expand_fix (target, op0, unsignedp);
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7975 target = gen_reg_rtx (mode);
7976 /* expand_float can't figure out what to do if FROM has VOIDmode.
7977 So give it the correct mode. With -O, cse will optimize this. */
7978 if (GET_MODE (op0) == VOIDmode)
7979 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7981 expand_float (target, op0,
7982 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7987 temp = expand_unop (mode,
7988 ! unsignedp && flag_trapv
7989 && (GET_MODE_CLASS(mode) == MODE_INT)
7990 ? negv_optab : neg_optab, op0, target, 0);
7996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7998 /* Handle complex values specially. */
7999 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8000 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8001 return expand_complex_abs (mode, op0, target, unsignedp);
8003 /* Unsigned abs is simply the operand. Testing here means we don't
8004 risk generating incorrect code below. */
8005 if (TREE_UNSIGNED (type))
8008 return expand_abs (mode, op0, target, unsignedp,
8009 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8013 target = original_target;
8014 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8015 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8016 || GET_MODE (target) != mode
8017 || (GET_CODE (target) == REG
8018 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8019 target = gen_reg_rtx (mode);
8020 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8021 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8023 /* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
   value.  */
8026 this_optab = (TREE_UNSIGNED (type)
8027 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8028 : (code == MIN_EXPR ? smin_optab : smax_optab));
8030 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get better
   code without it.  */
8038 if (GET_CODE (target) == MEM)
8039 target = gen_reg_rtx (mode);
8042 emit_move_insn (target, op0);
8044 op0 = gen_label_rtx ();
8046 /* If this mode is an integer too wide to compare properly,
8047 compare word by word. Rely on cse to optimize constant cases. */
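/* E.g., a DImode MIN/MAX on a 32-bit target cannot use one compare,
   so do_jump_by_parts_greater_rtx compares the high words first and
   falls back to the low words when they are equal.  */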
8048 if (GET_MODE_CLASS (mode) == MODE_INT
8049 && ! can_compare_p (GE, mode, ccp_jump))
8051 if (code == MAX_EXPR)
8052 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8053 target, op1, NULL_RTX, op0);
8055 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8056 op1, target, NULL_RTX, op0);
8060 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8061 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8062 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8065 emit_move_insn (target, op1);
8070 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8071 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8078 temp = expand_unop (mode, ffs_optab, op0, target, 1);
/* ??? Can optimize bitwise operations with one arg constant.
   Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
   and (a bitwise1 b) bitwise2 b (etc)
   but that is probably not worthwhile.  */
8088 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8089 boolean values when we want in all cases to compute both of them. In
8090 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8091 as actual zero-or-1 values and then bitwise anding. In cases where
8092 there cannot be any side effects, better code would be made by
8093 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8094 how to recognize those cases. */
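/* E.g., `(a > 0) & (b > 0)' as TRUTH_AND_EXPR can be computed as two
   store-flag results ANDed together, branch-free; the ANDIF form
   would instead jump around the evaluation of the second comparison.
   (Illustrative example, not from the source.)  */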
8096 case TRUTH_AND_EXPR:
8098 this_optab = and_optab;
8103 this_optab = ior_optab;
8106 case TRUTH_XOR_EXPR:
8108 this_optab = xor_optab;
8115 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8121 /* Could determine the answer when only additive constants differ. Also,
8122 the addition of one can be handled by changing the condition. */
8129 case UNORDERED_EXPR:
8136 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8140 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8141 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8143 && GET_CODE (original_target) == REG
8144 && (GET_MODE (original_target)
8145 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8147 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8150 if (temp != original_target)
8151 temp = copy_to_reg (temp);
8153 op1 = gen_label_rtx ();
8154 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8155 GET_MODE (temp), unsignedp, 0, op1);
8156 emit_move_insn (temp, const1_rtx);
8161 /* If no set-flag instruction, must generate a conditional
8162 store into a temporary variable. Drop through
8163 and handle this like && and ||. */
8165 case TRUTH_ANDIF_EXPR:
8166 case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
8169 /* Make sure we don't have a hard reg (such as function's return
8170 value) live across basic blocks, if not optimizing. */
8171 || (!optimize && GET_CODE (target) == REG
8172 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8173 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_clr_insn (target);
8178 op1 = gen_label_rtx ();
8179 jumpifnot (exp, op1);
      if (target)
	emit_0_to_1_insn (target);
      emit_label (op1);
      return ignore ? const0_rtx : target;
8187 case TRUTH_NOT_EXPR:
8188 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8189 /* The parser is careful to generate TRUTH_NOT_EXPR
8190 only with operands that are always zero or one. */
8191 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8192 target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, ro_modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
8206 conversion in each arm, bring that conversion back out. */
8207 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8208 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8209 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8210 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8212 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8213 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8215 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8216 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8217 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8218 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8219 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8220 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8221 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8222 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8223 return expand_expr (build1 (NOP_EXPR, type,
8224 build (COND_EXPR, TREE_TYPE (iftrue),
8225 TREE_OPERAND (exp, 0),
					 iftrue, iffalse)),
			    target, tmode, modifier);
8231 /* Note that COND_EXPRs whose type is a structure or union
8232 are required to be constructed to contain assignments of
8233 a temporary variable, so that we can evaluate them here
8234 for side effect only. If type is void, we must do likewise. */
8236 /* If an arm of the branch requires a cleanup,
8237 only that cleanup is performed. */
      {
	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;
8242 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8243 convert it to our mode, if necessary. */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode,
			       ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
8265 /* Check for X ? A + B : A. If we have this, we can copy A to the
8266 output and conditionally add B. Similarly for unary operations.
8267 Don't do this if X has side-effects because those side effects
8268 might affect A or B and the "?" operation is a sequence point in
8269 ANSI. (operand_equal_p tests for side effects.) */
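	/* For instance, in "cond ? a + b : a" the SINGLETON is A and the
	   BINARY_OP is A + B: A can be computed unconditionally and B
	   conditionally added, provided COND has no side effects.
	   (Illustrative example, not from the original source.)  */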
8271 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8272 && operand_equal_p (TREE_OPERAND (exp, 2),
8273 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8274 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8275 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8276 && operand_equal_p (TREE_OPERAND (exp, 1),
8277 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8278 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8279 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8280 && operand_equal_p (TREE_OPERAND (exp, 2),
8281 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8282 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8283 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8284 && operand_equal_p (TREE_OPERAND (exp, 1),
8285 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8286 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8288 /* If we are not to produce a result, we have no target. Otherwise,
8289 if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
8296 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8297 || (singleton && GET_CODE (original_target) == REG
8298 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8299 && original_target == var_rtx (singleton)))
8300 && GET_MODE (original_target) == mode
8301 #ifdef HAVE_conditional_move
8302 && (! can_conditionally_move_p (mode)
8303 || GET_CODE (original_target) == REG
8304 || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
8307 || TREE_ADDRESSABLE (type)))
8308 temp = original_target;
8309 else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
8314 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8315 do the test of X as a store-flag operation, do this as
8316 A + ((X != 0) << log C). Similarly for other simple binary
8317 operators. Only do for C == 1 if BRANCH_COST is low. */
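	/* E.g. "x ? a + 4 : a" can become a + ((x != 0) << 2), replacing
	   the branch with a store-flag and a shift.  (Illustrative
	   example, not from the original source.)  */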
8318 if (temp && singleton && binary_op
8319 && (TREE_CODE (binary_op) == PLUS_EXPR
8320 || TREE_CODE (binary_op) == MINUS_EXPR
8321 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8322 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8323 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8324 : integer_onep (TREE_OPERAND (binary_op, 1)))
8325 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8328 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8329 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8330 ? addv_optab : add_optab)
8331 : TREE_CODE (binary_op) == MINUS_EXPR
8332 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8333 ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);
8337 /* If we had X ? A : A + 1, do this as A + (X == 0).
8339 We have to invert the truth value here and then put it
8340 back later if do_store_flag fails. We cannot simply copy
8341 TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
8344 if (singleton == TREE_OPERAND (exp, 1))
8345 TREE_OPERAND (exp, 0)
8346 = invert_truthvalue (TREE_OPERAND (exp, 0));
8348 result = do_store_flag (TREE_OPERAND (exp, 0),
8349 (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);
8353 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8354 result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
8359 (safe_from_p (temp, singleton, 1)
8360 ? temp : NULL_RTX), 0);
	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
8368 else if (singleton == TREE_OPERAND (exp, 1))
8369 TREE_OPERAND (exp, 0)
8370 = invert_truthvalue (TREE_OPERAND (exp, 0));
8373 do_pending_stack_adjust ();
8375 op0 = gen_label_rtx ();
	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
8381 /* If the target conflicts with the other operand of the
8382 binary op, we can't use it. Also, we can't use the target
8383 if it is a hard register, because evaluating the condition
8384 might clobber it. */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8387 || (GET_CODE (temp) == REG
8388 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8389 temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8395 if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8400 start_cleanup_deferral ();
8401 if (binary_op && temp == 0)
8402 /* Just touch the other operand. */
8403 expand_expr (TREE_OPERAND (binary_op, 1),
8404 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
8416 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8417 comparison operator. If we have one of these cases, set the
8418 output to A, branch on A (cse will merge these two references),
8419 then set the output to FOO. */
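	/* E.g. for "x < 0 ? x : y": store X into the output, branch on
	   X < 0 (cse merges the two uses of X), and store Y on the
	   fall-through path.  (Illustrative example, not from the
	   original source.)  */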
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8422 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8423 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8424 TREE_OPERAND (exp, 1), 0)
8425 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8426 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
8429 if (GET_CODE (temp) == REG
8430 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8431 temp = gen_reg_rtx (mode);
8432 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8433 jumpif (TREE_OPERAND (exp, 0), op0);
8435 start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8441 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8442 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8443 TREE_OPERAND (exp, 2), 0)
8444 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8445 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
8448 if (GET_CODE (temp) == REG
8449 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8450 temp = gen_reg_rtx (mode);
8451 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8452 jumpifnot (TREE_OPERAND (exp, 0), op0);
8454 start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8463 start_cleanup_deferral ();
8465 /* One branch of the cond can be void, if it never returns. For
8466 example A ? throw : E */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8469 store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
8472 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8473 end_cleanup_deferral ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
8478 start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8481 store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
8484 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	  }

	emit_queue ();
	emit_label (op1);
	return temp;
      }

    case TARGET_EXPR:
      {
8498 /* Something needs to be initialized, but we didn't know
8499 where that thing was when building the tree. For example,
8500 it could be the return value of a function, or a parameter
8501 to a function which lays down in the stack, or a temporary
8502 variable which must be passed by reference.
8504 We guarantee that the expression will either be constructed
8505 or copied into our original target. */
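	/* In C++, for example, a TARGET_EXPR commonly wraps a temporary
	   initialized by a constructor: SLOT (operand 0) is the
	   temporary's VAR_DECL and operand 1 is its initializer.
	   (Illustrative note, not from the original source.)  */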
8507 tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;
	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;
8517 /* Set this here so that if we get a target that refers to a
8518 register variable that's already been used, put_reg_into_stack
8519 knows that it should fix up those uses. */
8520 TREE_USED (slot) = 1;
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
8534 target = assign_temp (type, 2, 0, 1);
8535 /* All temp slots at this level must not conflict. */
8536 preserve_temp_slots (target);
8537 SET_DECL_RTL (slot, target);
8538 if (TREE_ADDRESSABLE (slot))
8539 put_var_into_stack (slot);
8541 /* Since SLOT is not known to the called function
8542 to belong to its stack frame, we must build an explicit
8543 cleanup. This case occurs when we must build up a reference
8544 to pass the reference as an argument. In this case,
	       it is very likely that such a reference need not be
	       built here.  */
8548 if (TREE_OPERAND (exp, 2) == 0)
8549 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8550 cleanups = TREE_OPERAND (exp, 2);
8555 /* This case does occur, when expanding a parameter which
8556 needs to be constructed on the stack. The target
8557 is the actual stack address that we want to initialize.
8558 The function we call will perform the cleanup in this case. */
8560 /* If we have already assigned it space, use that space,
8561 not target that we were passed in, as our target
8562 parameter is only a hint. */
8563 if (DECL_RTL_SET_P (slot))
8565 target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
8568 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8573 SET_DECL_RTL (slot, target);
8574 /* If we must have an addressable slot, then make sure that
8575 the RTL that we just stored in slot is OK. */
8576 if (TREE_ADDRESSABLE (slot))
8577 put_var_into_stack (slot);
8581 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8582 /* Mark it as expanded. */
8583 TREE_OPERAND (exp, 1) = NULL_TREE;
8585 store_expr (exp1, target, 0);
8587 expand_decl_cleanup (NULL_TREE, cleanups);
	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
8595 tree rhs = TREE_OPERAND (exp, 1);
	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
8603 /* If lhs is complex, expand calls in rhs before computing it.
8604 That's so we don't compute a pointer and save it over a
8605 call. If lhs is simple, compute it first so we can give it
8606 as a target if the rhs is just a call. This avoids an
8607 extra temp and copy and that prevents a partial-subsumption
8608 which makes bad code. Actually we could treat
8609 component_ref's of vars like vars. */
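	/* E.g. for "a = f (b)" with a simple A, computing A first lets
	   the call expand with A as its target, so the return value of F
	   lands in A without an intermediate temporary.  (Illustrative
	   example, not from the original source.)  */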
8611 tree lhs = TREE_OPERAND (exp, 0);
8612 tree rhs = TREE_OPERAND (exp, 1);
8616 /* Check for |= or &= of a bitfield of size one into another bitfield
8617 of size 1. In this case, (unless we need the result of the
8618 assignment) we can do this more efficiently with a
8619 test followed by an assignment, if necessary.
	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   handle it.  */
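	/* E.g. "s.a |= s.b", with A and B one-bit fields and the result
	   unused, becomes "if (s.b) s.a = 1;": a test and a conditional
	   store instead of a read-modify-write of the containing word.
	   (Illustrative example, not from the original source.)  */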
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
8626 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8627 || TREE_CODE (rhs) == BIT_AND_EXPR)
8628 && TREE_OPERAND (rhs, 0) == lhs
8629 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8630 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8631 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8633 rtx label = gen_label_rtx ();
8635 do_jump (TREE_OPERAND (rhs, 1),
8636 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8637 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8638 expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }
	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case RETURN_EXPR:
8654 if (!TREE_OPERAND (exp, 0))
8655 expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
8660 case PREINCREMENT_EXPR:
8661 case PREDECREMENT_EXPR:
8662 return expand_increment (exp, 0, ignore);
8664 case POSTINCREMENT_EXPR:
8665 case POSTDECREMENT_EXPR:
8666 /* Faster to treat as pre-increment if result is not used. */
8667 return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;
8674 /* Are we taking the address of a nested function? */
8675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8676 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8677 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
8687 /* If we are taking the address of a constant and are at the
8688 top level, we have to use output_constant_def since we can't
8689 call force_const_mem at top level. */
8691 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8692 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8694 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8697 /* We make sure to pass const0_rtx down if we came in with
8698 ignore set, to avoid doing the cleanups twice for something. */
8699 op0 = expand_expr (TREE_OPERAND (exp, 0),
8700 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8701 (modifier == EXPAND_INITIALIZER
8702 ? modifier : EXPAND_CONST_ADDRESS));
8704 /* If we are going to ignore the result, OP0 will have been set
8705 to const0_rtx, so just return it. Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;
8710 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8711 clever and returns a REG when given a MEM. */
8712 op0 = protect_from_queue (op0, 1);
8714 /* We would like the object in memory. If it is a constant, we can
8715 have it be statically allocated into memory. For a non-constant,
8716 we need to allocate some memory and store the value into it. */
8718 if (CONSTANT_P (op0))
8719 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8721 else if (GET_CODE (op0) == MEM)
8723 mark_temp_addr_taken (op0);
8724 temp = XEXP (op0, 0);
8727 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8728 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8729 || GET_CODE (op0) == PARALLEL)
	      /* If this object is in a register, we cannot take its
		 address directly; copy it to a memory temporary.  */
8733 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8734 tree nt = build_qualified_type (inner_type,
8735 (TYPE_QUALS (inner_type)
8736 | TYPE_QUAL_CONST));
8737 rtx memloc = assign_temp (nt, 1, 1, 1);
8739 mark_temp_addr_taken (memloc);
8740 if (GET_CODE (op0) == PARALLEL)
8741 /* Handle calls that pass values in multiple non-contiguous
8742 locations. The Irix 6 ABI has examples of this. */
8743 emit_group_store (memloc, op0,
8744 int_size_in_bytes (inner_type),
8745 TYPE_ALIGN (inner_type));
	      else
		emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }
	  if (GET_CODE (op0) != MEM)
	    abort ();
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
8757 #ifdef POINTERS_EXTEND_UNSIGNED
8758 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8759 && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
8768 if (flag_force_addr && GET_CODE (op0) != REG)
8769 op0 = force_reg (Pmode, op0);
8771 if (GET_CODE (op0) == REG
8772 && ! REG_USERVAR_P (op0))
8773 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
	  /* If we might have had a temp slot, add an equivalent address
	     for it.  */
	  if (temp != 0)
	    update_temp_slot_address (temp, op0);
8780 #ifdef POINTERS_EXTEND_UNSIGNED
8781 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8782 && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;
8797 /* Get the rtx code of the operands. */
8798 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8799 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();
8806 /* Move the real (op0) and imaginary (op1) parts to their location. */
8807 emit_move_insn (gen_realpart (mode, target), op0);
8808 emit_move_insn (gen_imagpart (mode, target), op1);
	insns = get_insns ();
	end_sequence ();
8813 /* Complex construction should appear as a single unit. */
8814 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8815 each with a separate pseudo as destination.
8816 It's not correct for flow to treat them as a unit. */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8827 return gen_realpart (mode, op0);
    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8831 return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;
8839 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();
8846 /* Store the realpart and the negated imagpart to target. */
8847 emit_move_insn (gen_realpart (partmode, target),
8848 gen_realpart (partmode, op0));
8850 imag_t = gen_imagpart (partmode, target);
8851 temp = expand_unop (partmode,
8852 ! unsignedp && flag_trapv
8853 && (GET_MODE_CLASS(partmode) == MODE_INT)
8854 ? negv_optab : neg_optab,
8855 gen_imagpart (partmode, op0), imag_t, 0);
8857 emit_move_insn (imag_t, temp);
	insns = get_insns ();
	end_sequence ();
8862 /* Conjugate should appear as a single unit
8863 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8864 each with a separate pseudo as destination.
8865 It's not correct for flow to treat them as a unit. */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
8874 case TRY_CATCH_EXPR:
8876 tree handler = TREE_OPERAND (exp, 1);
8878 expand_eh_region_start ();
8880 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	expand_eh_region_end_cleanup (handler);
	return op0;
      }
8887 case TRY_FINALLY_EXPR:
8889 tree try_block = TREE_OPERAND (exp, 0);
8890 tree finally_block = TREE_OPERAND (exp, 1);
8891 rtx finally_label = gen_label_rtx ();
8892 rtx done_label = gen_label_rtx ();
8893 rtx return_link = gen_reg_rtx (Pmode);
8894 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8895 (tree) finally_label, (tree) return_link);
8896 TREE_SIDE_EFFECTS (cleanup) = 1;
8898 /* Start a new binding layer that will keep track of all cleanup
8899 actions to be performed. */
8900 expand_start_bindings (2);
8902 target_temp_slot_level = temp_slot_level;
8904 expand_decl_cleanup (NULL_TREE, cleanup);
8905 op0 = expand_expr (try_block, target, tmode, modifier);
8907 preserve_temp_slots (op0);
8908 expand_end_bindings (NULL_TREE, 0, 0);
8909 emit_jump (done_label);
8910 emit_label (finally_label);
8911 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8912 emit_indirect_jump (return_link);
      emit_label (done_label);
      return op0;
      }
8917 case GOTO_SUBROUTINE_EXPR:
8919 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8920 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8921 rtx return_address = gen_label_rtx ();
8922 emit_move_insn (return_link,
		      gen_rtx_LABEL_REF (Pmode, return_address));
      emit_jump (subr);
      emit_label (return_address);
      return const0_rtx;
      }

    case VA_ARG_EXPR:
8930 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);
    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
8944 /* Here to do an ordinary binary operator, generating an instruction
8945 from the optab already placed in `this_optab'. */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
8949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8950 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
8959 /* Similar to expand_expr, except that we don't specify a target, target
8960 mode, or modifier and we return the alignment of the inner type. This is
8961 used in cases where it is not necessary to align the result to the
8962 alignment of its type as long as we know the alignment of the result, for
8963 example for comparisons of BLKmode values. */
static rtx
expand_expr_unaligned (exp, palign)
     tree exp;
     unsigned int *palign;
{
  rtx op0;
  tree type = TREE_TYPE (exp);
8972 enum machine_mode mode = TYPE_MODE (type);
8974 /* Default the alignment we return to that of the type. */
8975 *palign = TYPE_ALIGN (type);
  /* The only case in which we do anything special is if the resulting mode
     is BLKmode.  */
8979 if (mode != BLKmode)
8980 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8982 switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
8987 /* Conversions between BLKmode values don't change the underlying
8988 alignment or value. */
8989 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8990 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
      break;

    case ARRAY_REF:
      /* Much of the code for this case is copied directly from expand_expr.
8995 We need to duplicate it here because we will do something different
	 in the fall-through case, so we need to handle the same exceptions
	 it does.  */
      {
	tree array = TREE_OPERAND (exp, 0);
9000 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
9001 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9002 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	  abort ();
9008 /* Optimize the special-case of a zero lower bound.
9010 We convert the low_bound to sizetype to avoid some problems
9011 with constant folding. (E.g. suppose the lower bound is 1,
9012 and its mode is QI. Without the conversion, (ARRAY
9013 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9014 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9016 if (! integer_zerop (low_bound))
9017 index = size_diffop (index, convert (sizetype, low_bound));
9019 /* If this is a constant index into a constant array,
9020 just get the value from the array. Handle both the cases when
9021 we have an explicit constructor and when our operand is a variable
9022 that was declared const. */
9024 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
9025 && host_integerp (index, 0)
9026 && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
9033 i = tree_low_cst (index, 0);
9034 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
	  }
9041 else if (optimize >= 1
9042 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9043 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9044 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
9046 if (TREE_CODE (index) == INTEGER_CST)
9048 tree init = DECL_INITIAL (array);
9050 if (TREE_CODE (init) == CONSTRUCTOR)
9054 for (elem = CONSTRUCTOR_ELTS (init);
9055 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9056 elem = TREE_CHAIN (elem))
		  ;

		if (elem)
		  return expand_expr_unaligned (fold (TREE_VALUE (elem)),
						palign);
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
9071 /* If the operand is a CONSTRUCTOR, we can just extract the
9072 appropriate field if it is present. Don't do this if we have
9073 already written the data since we want to refer to that copy
9074 and varasm.c assumes that's what we'll do. */
9075 if (TREE_CODE (exp) == COMPONENT_REF
9076 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9077 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	  {
	    tree elt;

	    for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9082 elt = TREE_CHAIN (elt))
9083 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9084 /* Note that unlike the case in expand_expr, we know this is
9085 BLKmode and hence not an integer. */
9086 return expand_expr_unaligned (TREE_VALUE (elt), palign);
	  }

      {
	enum machine_mode mode1;
9091 HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int unsignedp;
	int volatilep = 0;
	unsigned int alignment;
9096 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);
9100 /* If we got back the original object, something is wrong. Perhaps
9101 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
9106 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9108 /* If this is a constant, put it into a register if it is a
9109 legitimate constant and OFFSET is 0 and memory if it isn't. */
9110 if (CONSTANT_P (op0))
9112 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
	    if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (inner_mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (inner_mode, op0));
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9125 /* If this object is in a register, put it into memory.
9126 This case can't occur in C, but can in Ada if we have
9127 unchecked conversion of an expression from a scalar type to
9128 an array or record type. */
9129 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9130 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9132 tree nt = build_qualified_type (TREE_TYPE (tem),
9133 (TYPE_QUALS (TREE_TYPE (tem))
9134 | TYPE_QUAL_CONST));
9135 rtx memloc = assign_temp (nt, 1, 1, 1);
9137 mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();
9145 if (GET_MODE (offset_rtx) != ptr_mode)
9146 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9148 #ifdef POINTERS_EXTEND_UNSIGNED
9149 if (GET_MODE (offset_rtx) != Pmode)
9150 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9153 op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
9157 /* Don't forget about volatility even if this is a bitfield. */
9158 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9160 op0 = copy_rtx (op0);
9161 MEM_VOLATILE_P (op0) = 1;
9164 /* Check the access. */
9165 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    rtx to;
	    int size;

	    to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9171 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9173 /* Check the access right of the pointer. */
9174 in_check_memory_usage = 1;
9175 if (size > BITS_PER_UNIT)
9176 emit_library_call (chkr_check_addr_libfunc,
9177 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9178 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9179 TYPE_MODE (sizetype),
9180 GEN_INT (MEMORY_USE_RO),
9181 TYPE_MODE (integer_type_node));
	    in_check_memory_usage = 0;
	  }
9185 /* In cases where an aligned union has an unaligned object
9186 as a field, we might be extracting a BLKmode value from
9187 an integer-mode (e.g., SImode) object. Handle this case
9188 by doing the extract into an object as wide as the field
9189 (which we know to be the width of a basic mode), then
9190 storing into memory, and changing the mode to BLKmode.
9191 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9192 EXPAND_INITIALIZER), then we must not copy to a temporary. */
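	/* E.g. a misaligned three-byte BLKmode field within an
	   SImode-aligned union can be extracted as an SImode bit-field,
	   stored into a stack temporary, and the temporary handed back
	   with its mode changed to BLKmode.  (Illustrative example, not
	   from the original source.)  */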
9193 if (mode1 == VOIDmode
9194 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9195 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9196 && (TYPE_ALIGN (type) > alignment
9197 || bitpos % TYPE_ALIGN (type) != 0)))
9199 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9201 if (ext_mode == BLKmode)
9203 /* In this case, BITPOS must start at a byte boundary. */
9204 if (GET_CODE (op0) != MEM
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
	      }
	    else
	      {
		tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
						TYPE_QUAL_CONST);
		rtx new = assign_temp (nt, 0, 1, 1);
9216 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9217 unsignedp, NULL_RTX, ext_mode,
9218 ext_mode, alignment,
9219 int_size_in_bytes (TREE_TYPE (tem)));
9221 /* If the result is a record type and BITSIZE is narrower than
9222 the mode of OP0, an integral mode, and this is a big endian
9223 machine, we must put the field into the high-order bits. */
9224 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9225 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9226 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9227 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				      size_int (GET_MODE_BITSIZE
						(GET_MODE (op0)) - bitsize),
				      op0, 1);
9233 emit_move_insn (new, op0);
9234 op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
	      }
	  }
	else
9239 /* Get a reference to just this component. */
9240 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9242 set_mem_attributes (op0, exp, 0);
9244 /* Adjust the alignment in case the bit position is not
9245 a multiple of the alignment of the inner object. */
	while (bitpos % alignment != 0)
	  alignment >>= 1;
9249 if (GET_CODE (XEXP (op0, 0)) == REG)
9250 mark_reg_pointer (XEXP (op0, 0), alignment);
9252 MEM_IN_STRUCT_P (op0) = 1;
9253 MEM_VOLATILE_P (op0) |= volatilep;
	*palign = alignment;
	return op0;
      }

    default:
      break;
    }

  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
/* Return the tree node if ARG corresponds to a string constant or zero
9268 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9269 in bytes within the string that ARG is accessing. The type of the
9270 offset will be `sizetype'. */
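/* For example, given the tree for "abcd" + 1, string_constant returns
   the STRING_CST "abcd" and sets *PTR_OFFSET to 1.  (Illustrative
   example, not from the original source.)  */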
static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);
9279 if (TREE_CODE (arg) == ADDR_EXPR
9280 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9282 *ptr_offset = size_zero_node;
9283 return TREE_OPERAND (arg, 0);
9285 else if (TREE_CODE (arg) == PLUS_EXPR)
9287 tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
9293 if (TREE_CODE (arg0) == ADDR_EXPR
9294 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9296 *ptr_offset = convert (sizetype, arg1);
9297 return TREE_OPERAND (arg0, 0);
9299 else if (TREE_CODE (arg1) == ADDR_EXPR
9300 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
9310 /* Expand code for a post- or pre- increment or decrement
9311 and return the RTX for the result.
9312 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
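/* For example, "y = x++" is expanded with POST == 1 and IGNORE == 0, so
   the RTX returned holds the old value of X; "++x" uses POST == 0 and
   yields the incremented value itself.  (Illustrative example, not from
   the original source.)  */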
static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
9322 optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9325 int op0_is_copy = 0;
9326 int single_insn = 0;
9327 /* 1 means we can't store into OP0 directly,
9328 because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;
9332 /* Stabilize any component ref that might need to be
9333 evaluated more than once below. */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
9336 || (TREE_CODE (incremented) == COMPONENT_REF
9337 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9338 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9339 incremented = stabilize_reference (incremented);
9340 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9341 ones into save exprs so that they don't accidentally get evaluated
9342 more than once by the code below. */
9343 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9344 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9345 incremented = save_expr (incremented);
9347 /* Compute the operands as RTX.
9348 Note whether OP0 is the actual lvalue or a copy of it:
9349 I believe it is a copy iff it is a register or subreg
9350 and insns were generated in computing it. */
9352 temp = get_last_insn ();
9353 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9355 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9356 in place but instead must do sign- or zero-extension during assignment,
9357 so we copy it into a new register and let the code below use it as
     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9363 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	op0 = copy_to_reg (op0);
    }
9370 else if (GET_CODE (op0) == SUBREG
9371 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9373 /* We cannot increment this SUBREG in place. If we are
9374 post-incrementing, get a copy of the old value. Otherwise,
9375 just mark that we cannot increment in place. */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }
9382 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9383 && temp != get_last_insn ());
9384 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9385 EXPAND_MEMORY_USE_BAD);
9387 /* Decide whether incrementing or decrementing. */
9388 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9389 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9390 this_optab = sub_optab;
9392 /* Convert decrement by a constant into a negative increment. */
9393 if (this_optab == sub_optab
9394 && GET_CODE (op1) == CONST_INT)
9396 op1 = GEN_INT (-INTVAL (op1));
9397 this_optab = add_optab;
9400 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9401 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9403 /* For a preincrement, see if we can do this with a single instruction. */
  if (! post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
9407 if (icode != (int) CODE_FOR_nothing
9408 /* Make sure that OP0 is valid for operands 0 and 1
9409 of the insn we want to queue. */
9410 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9411 && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }
9416 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9417 then we cannot just increment OP0. We must therefore contrive to
9418 increment the original value. Then, for postincrement, we can return
9419 OP0 since it is a copy of the old value. For preincrement, expand here
9420 unless we can do it with a single insn.
9422 Likewise if storing directly into OP0 would clobber high bits
9423 we need to preserve (bad_subreg). */
9424 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9426 /* This is the easiest way to increment the value wherever it is.
9427 Problems with multiple evaluation of INCREMENTED are prevented
9428 because either (1) it is a component_ref or preincrement,
9429 in which case it was stabilized above, or (2) it is an array_ref
9430 with constant index in an array in a register, which is
9431 safe to reevaluate. */
9432 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9433 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9434 ? MINUS_EXPR : PLUS_EXPR),
			     TREE_TYPE (exp),
			     incremented,
			     TREE_OPERAND (exp, 1));
9439 while (TREE_CODE (incremented) == NOP_EXPR
9440 || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}
      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
9452 /* We have a true reference to the value in OP0.
9453 If there is an insn to add or subtract in this mode, queue it.
9454 Queueing the increment insn avoids the register shuffling
9455 that often results if we must increment now and first save
9456 the old value for subsequent use. */
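      /* For example, for "*p++ = c" the increment of P can stay on the
	 queue until after the store, so the old value of P need not be
	 copied into a scratch register first.  (Illustrative note, not
	 from the original source.)  */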
9458 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
      op0 = stabilize (op0);
#endif
9462 icode = (int) this_optab->handlers[(int) mode].insn_code;
9463 if (icode != (int) CODE_FOR_nothing
9464 /* Make sure that OP0 is valid for operands 0 and 1
9465 of the insn we want to queue. */
9466 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9467 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9469 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9470 op1 = force_reg (mode, op1);
	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
9474 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9476 rtx addr = (general_operand (XEXP (op0, 0), mode)
9477 ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;
9481 op0 = replace_equiv_address (op0, addr);
9482 temp = force_reg (GET_MODE (op0), op0);
9483 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9484 op1 = force_reg (mode, op1);
9486 /* The increment queue is LIFO, thus we have to `queue'
9487 the instructions in reverse order. */
9488 enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }
9494 /* Preincrement, or we can't increment with one simple insn. */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
9499 /* Arrange to return the incremented value. */
9500 /* Copy the rtx because expand_binop will protect from the queue,
9501 and the results of that would be invalid for us to return
9502 if our caller does emit_queue before using our result. */
9503 temp = copy_rtx (value = op0);
9505 /* Increment however we can. */
9506 op1 = expand_binop (mode, this_optab, value, op1,
9507 current_function_check_memory_usage ? NULL_RTX : op0,
9508 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9509 /* Make sure the value is stored into OP0. */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
9516 /* At the start of a function, record that we have no previously-pushed
9517 arguments waiting to be popped. */
void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
9525 /* When exiting from function, if safe, clear out any pending stack adjust
9526 so the adjustment won't get done.
9528 Note, if the current function calls alloca, then it must have a
9529 frame pointer regardless of the value of flag_omit_frame_pointer. */
void
clear_pending_stack_adjust ()
{
9534 #ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9537 && EXIT_IGNORE_STACK
9538 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9539 && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}
9547 /* Pop any previously-pushed arguments that have not been popped yet. */
void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
9560 /* Expand conditional expressions. */
9562 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}
/* Generate code to evaluate EXP and jump to LABEL if the value is
   nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
9584 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9585 the result is zero, or IF_TRUE_LABEL if the result is one.
9586 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9587 meaning fall through in that case.
9589 do_jump always does any pending stack adjust except when it does not
9590 actually perform a jump. An example where there is no jump
9591 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9593 This function is responsible for optimizing cases such as
9594 &&, || and comparison operators in EXP. */
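/* For example, for "if (a && b) f ();" do_jump tests A and jumps to the
   false label without ever evaluating B whenever A is zero.
   (Illustrative example, not from the original source.)  */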
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
9601 enum tree_code code = TREE_CODE (exp);
9602 /* Some cases need to create a label to jump to
9603 in order to properly fall through.
9604 These cases set DROP_THROUGH_LABEL nonzero. */
9605 rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
9611 #ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;
#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
9638 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9639 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9640 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9641 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;

    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
9646 if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
9649 case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
9655 /* These cannot change zero->non-zero or vice versa. */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;
9659 case WITH_RECORD_EXPR:
9660 /* Put the object on the placeholder list, recurse through our first
9661 operand, and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
9664 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
9669 /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
9673 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9674 TREE_OPERAND (exp, 0),
9675 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9676 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
9682 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9683 TREE_OPERAND (exp, 0),
9684 TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
9689 /* If we are AND'ing with a small constant, do this comparison in the
9690 smallest type that fits. If the machine doesn't have comparisons
9691 that small, it will be converted back to the wider comparison.
9692 This helps if we are testing the sign bit of a narrower object.
9693 combine can't do this for us because it can't know whether a
9694 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
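      /* E.g. "(x & 0x80) != 0" with X an int can be tested as a QImode
	 comparison when the machine has one, since only the low byte
	 matters.  (Illustrative example, not from the original
	 source.)  */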
9696 if (! SLOW_BYTE_ACCESS
9697 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9698 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9699 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9700 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9701 && (type = type_for_mode (mode, 1)) != 0
9702 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9703 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9704 != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
9711 case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;
9715 case TRUTH_ANDIF_EXPR:
9716 if (if_false_label == 0)
9717 if_false_label = drop_through_label = gen_label_rtx ();
9718 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9719 start_cleanup_deferral ();
9720 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
9724 case TRUTH_ORIF_EXPR:
9725 if (if_true_label == 0)
9726 if_true_label = drop_through_label = gen_label_rtx ();
9727 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9728 start_cleanup_deferral ();
9729 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
9735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
9740 do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
9747 case ARRAY_RANGE_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
9757 /* Get description of this reference. We don't actually care
9758 about the underlying object here. */
9759 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9760 &unsignedp, &volatilep, &alignment);
9762 type = type_for_size (bitsize, unsignedp);
9763 if (! SLOW_BYTE_ACCESS
9764 && type != 0 && bitsize >= 0
9765 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9766 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9767 != CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
9776 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9777 if (integer_onep (TREE_OPERAND (exp, 1))
9778 && integer_zerop (TREE_OPERAND (exp, 2)))
9779 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9781 else if (integer_zerop (TREE_OPERAND (exp, 1))
9782 && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else
	{
9787 rtx label1 = gen_label_rtx ();
9788 drop_through_label = gen_label_rtx ();
9790 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9792 start_cleanup_deferral ();
9793 /* Now the THEN-expression. */
9794 do_jump (TREE_OPERAND (exp, 1),
9795 if_false_label ? if_false_label : drop_through_label,
9796 if_true_label ? if_true_label : drop_through_label);
9797 /* In case the do_jump just above never jumps. */
9798 do_pending_stack_adjust ();
9799 emit_label (label1);
9801 /* Now the ELSE-expression. */
9802 do_jump (TREE_OPERAND (exp, 2),
9803 if_false_label ? if_false_label : drop_through_label,
9804 if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
9811 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9813 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9814 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9816 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9817 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
9838 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9839 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9841 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9842 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9843 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9853 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9854 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9856 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9857 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
9878 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9879 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9881 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9882 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9883 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9891 if (GET_MODE_CLASS (mode) == MODE_INT
9892 && ! can_compare_p (LT, mode, ccp_jump))
9893 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9900 if (GET_MODE_CLASS (mode) == MODE_INT
9901 && ! can_compare_p (LE, mode, ccp_jump))
9902 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9909 if (GET_MODE_CLASS (mode) == MODE_INT
9910 && ! can_compare_p (GT, mode, ccp_jump))
9911 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9918 if (GET_MODE_CLASS (mode) == MODE_INT
9919 && ! can_compare_p (GE, mode, ccp_jump))
9920 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
9925 case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;
9931 if (code == UNORDERED_EXPR)
9932 cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
9935 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
9939 && (can_compare_p (rcmp, mode, ccp_jump)
9940 /* If the target doesn't provide either UNORDERED or ORDERED
9941 comparisons, canonicalize on UNORDERED for the library. */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label,
			       if_false_label);
      }
      break;
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      {
	enum rtx_code rcode1;
	enum tree_code tcode2;
9978 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9979 if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
9984 tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
9989 compares, decompose into UNORDERED + comparison. */
9990 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9991 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9992 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
      }
      break;

    case CALL_EXPR:
      /* Recognize jumps on the results of
9999 __builtin_expect (<test>, 0) and
10000 __builtin_expect (<test>, 1)
10002 We need to do this here, so that <test> is not converted to a SCC
10003 operation on machines that use condition code registers and COMPARE
10004 like the PowerPC, and then the jump is done based on whether the SCC
10005 operation produced a 1 or 0. */
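      /* E.g. "if (__builtin_expect (x == 0, 0)) ..." should emit a
	 compare-and-branch marked as unlikely, not a store-flag of
	 X == 0 followed by a jump on that value.  (Illustrative
	 example, not from the original source.)  */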
10007 /* Check for a built-in function. */
10008 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10010 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10011 tree arglist = TREE_OPERAND (exp, 1);
10013 if (TREE_CODE (fndecl) == FUNCTION_DECL
10014 && DECL_BUILT_IN (fndecl)
10015 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10016 && arglist != NULL_TREE
10017 && TREE_CHAIN (arglist) != NULL_TREE)
	    rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						  if_true_label);

	    if (seq != NULL_RTX)
	      {
		emit_insn (seq);
		return;
	      }
	  }
	}
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
10038 /* Copy to register to avoid generating bad insns by cse
10039 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10040 if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
10043 do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();
10047 if (GET_CODE (temp) == CONST_INT
10048 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10049 || GET_CODE (temp) == LABEL_REF)
10051 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
10055 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10056 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10057 /* Note swapping the labels gives us not-equal. */
10058 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10059 else if (GET_MODE (temp) != VOIDmode)
10060 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10061 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10062 GET_MODE (temp), NULL_RTX, 0,
				 if_false_label, if_true_label);
      else
	abort ();
    }
10068 if (drop_through_label)
10070 /* If do_jump produces code that might be jumped around,
10071 do any stack adjusts from that code, before the place
10072 where control merges in. */
10073 do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
10078 /* Given a comparison expression EXP for values too wide to be compared
10079 with one insn, test the comparison and jump to the appropriate label.
10080 The code of EXP is ignored; we always test GT if SWAP is 0,
10081 and LT if SWAP is 1. */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
10089 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10090 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10091 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10092 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
10097 /* Compare OP0 with OP1, word at a time, in mode MODE.
10098 UNSIGNEDP says to do unsigned comparison.
10099 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
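/* E.g. for a DImode comparison on a 32-bit target: compare the high
   words first; if they differ the answer is known, otherwise fall
   through and compare the low words unsigned.  (Illustrative example,
   not from the original source.)  */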
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
10108 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;
10112 if (! if_true_label || ! if_false_label)
10113 drop_through_label = gen_label_rtx ();
10114 if (! if_true_label)
10115 if_true_label = drop_through_label;
10116 if (! if_false_label)
10117 if_false_label = drop_through_label;
10119 /* Compare a word at a time, high order first. */
10120 for (i = 0; i < nwords; i++)
10122 rtx op0_word, op1_word;
10124 if (WORDS_BIG_ENDIAN)
10126 op0_word = operand_subword_force (op0, i, mode);
10127 op1_word = operand_subword_force (op1, i, mode);
10131 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10132 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10135 /* All but high-order word must be compared as unsigned. */
10136 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10137 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10138 NULL_RTX, if_true_label);
10140 /* Consider lower words only if these are equal. */
10141 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10142 NULL_RTX, 0, NULL_RTX, if_false_label);
10145 if (if_false_label)
10146 emit_jump (if_false_label);
10147 if (drop_through_label)
10148 emit_label (drop_through_label);
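
/* For illustration, a signed GT test of two DImode values on a 32-bit
   target expands to roughly

        if ((signed) hi0 > (signed) hi1) goto if_true;
        if (hi0 != hi1) goto if_false;
        if ((unsigned) lo0 > (unsigned) lo1) goto if_true;
        goto if_false;

   Only the high-order word is compared with the caller's signedness;
   the lower words carry no sign of their own and are always compared
   unsigned.  */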

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
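
/* For example, an EQ test of two DImode values on a 32-bit target
   becomes, roughly,

        if (w0_0 != w1_0) goto if_false;
        if (w0_1 != w1_1) goto if_false;
        goto if_true;

   since the words may be checked in any order for equality.  */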

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
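
/* For example, testing a DImode value against zero on a 32-bit target
   normally becomes, roughly,

        part = lo | hi;
        if (part == 0) goto if_true; else goto if_false;

   i.e. one IOR per additional word plus a single word-sized compare,
   instead of a compare and branch for every word.  */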

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
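
/* A typical use of the returned rtx (see do_store_flag below) is,
   schematically,

        result = compare_from_rtx (op0, op1, code, unsignedp,
                                   operand_mode, NULL_RTX, 0);
        if (GET_CODE (result) == CONST_INT)
          ... the comparison was folded at compile time ...
        else
          ... RESULT is (CODE (cc0) (const_int 0)) and is consumed by
              the conditional branch emitted via bcc_gen_fctn ...  */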

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
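
/* For example, a request to jump only when the condition is false,

        do_compare_rtx_and_jump (x, y, EQ, 0, SImode, NULL_RTX, 0,
                                 label, NULL_RTX);

   is rewritten (for non-float modes, where reversal is safe) into an
   NE test that jumps to LABEL, so a single branch insn suffices.  */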

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
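
/* For example, do_jump handles a signed or unsigned "less than" with,
   roughly,

        case LT_EXPR:
          do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);

   leaving the signed/unsigned choice to the operand-type test above.  */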

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
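
  /* For example, with 32-bit ints,

        (x & 8) != 0   becomes   (x >> 3) & 1
        (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

     and a sign-bit test such as (x & 0x80000000) != 0 becomes just
     (unsigned) x >> 31, the final AND being omitted.  */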

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
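
/* The set/compare/jump/set fallback at the end of this function emits,
   schematically,

        target = 1;
        compare op0, op1;
        branch-on-CODE label;
        target = 0;
     label:

   with the two constants interchanged when INVERT is set.  */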

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
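
/* With the default threshold, a switch statement therefore needs at
   least four case values (five if there is no casesi pattern) before
   a dispatch table is preferred to a chain of compares.  */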

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
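
/* For example, for

        switch (i) { case 10: ... case 13: ... }

   MINVAL is 10 and RANGE is 3, and the casesi pattern receives the
   SImode index plus those two bounds, branching to DEFAULT_LABEL when
   the index lies outside [10, 13].  */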

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
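
/* For example, with a 4-byte CASE_VECTOR_MODE and an absolute (non-PIC)
   table, the dispatch amounts to roughly

        tmp = *(table_label + index * 4);
        goto *tmp;

   i.e. the (mem (plus (mult index 4) (label_ref table))) built above.  */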

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
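
/* Note that the index is biased here rather than in do_tablejump: for a
   switch whose lowest case value is MINVAL, INDEX_EXPR - MINVAL is
   computed first, so do_tablejump sees a zero-based index and a single
   unsigned bound check against RANGE suffices.  */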