/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx get_push_address PARAMS ((int));

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
  PARAMS ((unsigned HOST_WIDE_INT,
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int,
				unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));

static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));

static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);

	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    reg = gen_rtx_REG (mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
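
/* For illustration only, a sketch of the calling convention (VAR and
   INC are hypothetical rtx's, not verbatim caller code):

       q = enqueue_insn (var, gen_add2_insn (copy_rtx (var), inc));

   Afterward Q can be used wherever the pre-increment value of VAR is
   required, until the queue is flushed by emit_queue.  */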
enqueue_insn (var, body)
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
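
/* A sketch of the intended discipline, with OP0 read and OP1 written
   (hypothetical operands, not verbatim caller code):

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 1);
       emit_insn (gen_move_insn (op1, op0));

   Both operands are filtered immediately before being placed in the
   insn, and the results are not held across a call to emit_queue.  */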
protect_from_queue (x, modify)
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  rtx temp = gen_reg_rtx (GET_MODE (x));

	  emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
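
/* For example (a sketch only; SRC_QI is a hypothetical QImode pseudo):

       rtx dst = gen_reg_rtx (SImode);
       convert_move (dst, src_qi, 1);

   emits a zero-extension from QImode to SImode; with UNSIGNEDP == 0 a
   sign-extension would be emitted instead.  */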
convert_move (to, from, unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     destinations.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      emit_unop_insn (code, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  libcall = extendsfdf2_libfunc;
	  libcall = extendsfxf2_libfunc;
	  libcall = extendsftf2_libfunc;
	  libcall = truncdfsf2_libfunc;
	  libcall = extenddfxf2_libfunc;
	  libcall = extenddftf2_libfunc;
	  libcall = truncxfsf2_libfunc;
	  libcall = truncxfdf2_libfunc;
	  libcall = trunctfsf2_libfunc;
	  libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      fill_value = const0_rtx;
	  && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	  && STORE_FLAG_VALUE == -1)
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));
	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
	  from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
	  emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
	  from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
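
/* For example (a sketch only): interpreting the VOIDmode constant -1
   as an unsigned QImode value and widening it to SImode,

       rtx y = convert_modes (SImode, QImode, constm1_rtx, 1);

   yields (const_int 255), because OLDMODE supplies the width that the
   bare CONST_INT lacks.  */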
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
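
  /* Concretely, with a 32-bit HOST_WIDE_INT, converting the CONST_INT
     -1 produced by an HImode computation to an unsigned DImode value
     must yield the double-word constant (high 0, low 0xffff), not two
     all-ones words; the masking below arranges exactly that.  */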
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));

      return gen_lowpart (mode, x);
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;

/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
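
/* Callers are expected to guard the call in the way emit_block_move
   does below, e.g. (sketch):

       if (GET_CODE (size) == CONST_INT
	   && MOVE_BY_PIECES_P (INTVAL (size), align))
	 move_by_pieces (x, y, INTVAL (size), align);

   so open-coded copies are emitted only when they beat a movstr
   pattern or a library call.  */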
move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
	data->offset -= size;

      if (data->autinc_to)
	  to1 = replace_equiv_address (data->to, data->to_addr);
	  to1 = adjust_address (to1, mode, 0);
	to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

	emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
	emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
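
/* Illustrative call (DST_ADDR, SRC_ADDR and NBYTES are hypothetical):

       rtx x = gen_rtx_MEM (BLKmode, dst_addr);
       rtx y = gen_rtx_MEM (BLKmode, src_addr);
       emit_block_move (x, y, GEN_INT (nbytes));

   This is a sketch of the interface only; real callers usually obtain
   the MEMs from expand_expr so that MEM_ALIGN is meaningful.  */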
emit_block_move (x, y, size)
#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);
  /* X, Y, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
  x = copy_to_mode_reg (Pmode, XEXP (x, 0));
  y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			  TREE_UNSIGNED (integer_type_node));
  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.

     This could be a user call to memcpy and the user may wish to
     examine the return value from memcpy.

     For targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     So instead of using a libcall sequence we build up a suitable
     CALL_EXPR and expand the call in the normal fashion.  */
  if (fn == NULL_TREE)
      /* This was copied from except.c, I don't know if all this is
	 necessary in this context or not.  */
      fn = get_identifier ("memcpy");
      fntype = build_pointer_type (void_type_node);
      fntype = build_function_type (fntype, NULL_TREE);
      fn = build_decl (FUNCTION_DECL, fn, fntype);
      ggc_add_tree_root (&fn, 1);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
  /* We need to make an argument list for the function call.

     memcpy has three arguments, the first two are void * addresses and
     the last is a size_t byte count for the copy.  */
  arg_list
    = build_tree_list (NULL_TREE,
		       make_tree (build_pointer_type (void_type_node), x));
  TREE_CHAIN (arg_list)
    = build_tree_list (NULL_TREE,
		       make_tree (build_pointer_type (void_type_node), y));
  TREE_CHAIN (TREE_CHAIN (arg_list))
    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
  emit_library_call (bcopy_libfunc, LCT_NORMAL,
		     VOIDmode, 3, y, Pmode, x, Pmode,
		     convert_to_mode (TYPE_MODE (integer_type_node), size,
				      TREE_UNSIGNED (integer_type_node)),
		     TYPE_MODE (integer_type_node));
#endif
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
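
/* E.g. (sketch; the register number is hypothetical): to load the
   first two words of ARG_MEM into hard regs 4 and 5,

       move_block_to_reg (4, arg_mem, 2, BLKmode);

   each word is moved with operand_subword_force unless a load_multiple
   pattern can do the whole block at once.  */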
move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)
#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   DST.  */

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
emit_group_load (dst, orig_src, ssize, align)
  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	    src = gen_reg_rtx (GET_MODE (orig_src));
	  emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

emit_group_store (orig_dst, src, ssize, align)
  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
2098 emit_group_store (temp, src, ssize, align);
2099 emit_group_load (dst, temp, ssize, align);
2102 else if (GET_CODE (dst) != MEM)
2104 dst = gen_reg_rtx (GET_MODE (orig_dst));
2105 /* Make life a bit easier for combine. */
2106 emit_move_insn (dst, const0_rtx);
2109 /* Process the pieces. */
2110 for (i = start; i < XVECLEN (src, 0); i++)
2112 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2113 enum machine_mode mode = GET_MODE (tmps[i]);
2114 unsigned int bytelen = GET_MODE_SIZE (mode);
2116 /* Handle trailing fragments that run over the size of the struct. */
2117 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2119 if (BYTES_BIG_ENDIAN)
2121 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2122 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2123 tmps[i], 0, OPTAB_WIDEN);
2125 bytelen = ssize - bytepos;
2128 /* Optimize the access just a bit. */
2129 if (GET_CODE (dst) == MEM
2130 && align >= GET_MODE_ALIGNMENT (mode)
2131 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2132 && bytelen == GET_MODE_SIZE (mode))
2133 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2135 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2136 mode, tmps[i], align, ssize);
2141 /* Copy from the pseudo into the (probable) hard reg. */
2142 if (GET_CODE (dst) == REG)
2143 emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
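
/* Sketch of the typical use when expanding a call (names illustrative,
   in the style of calls.c): pulling a small BLKmode return value out
   of the ABI's return register,

       target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

   where VALREG is the hard register in which the structure comes back.  */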
copy_blkmode_from_reg (tgtblk, srcreg, type)
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
						  | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */
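
/* For instance (illustrative), a call that implicitly reads the static
   chain register would be recorded with

       use_reg (&call_fusage, static_chain_rtx);

   and CALL_FUSAGE is later hung off the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */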
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)
  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (call_fusage, regs)
  for (i = 0; i < XVECLEN (regs, 0); i++)
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
2283 can_store_by_pieces (len, constfun, constfundata, align)
2284 unsigned HOST_WIDE_INT len;
2285 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2289 unsigned HOST_WIDE_INT max_size, l;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
2296 if (! MOVE_BY_PIECES_P (len, align))
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2303 /* We would first store what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2306 for (reverse = 0;
2307 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2308 reverse++)
2312 max_size = MOVE_MAX_PIECES + 1;
2313 while (max_size > 1)
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2320 if (mode == VOIDmode)
2323 icode = mov_optab->handlers[(int) mode].insn_code;
2324 if (icode != CODE_FOR_nothing
2325 && align >= GET_MODE_ALIGNMENT (mode))
2327 unsigned int size = GET_MODE_SIZE (mode);
2334 cst = (*constfun) (constfundata, offset, mode);
2335 if (!LEGITIMATE_CONSTANT_P (cst))
2345 max_size = GET_MODE_SIZE (mode);
2348 /* The code above should have handled everything. */
2356 /* Generate several move instructions to store LEN bytes generated by
2357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2358 pointer which will be passed as argument in every CONSTFUN call.
2359 ALIGN is maximum alignment we can assume. */
2362 store_by_pieces (to, len, constfun, constfundata, align)
2364 unsigned HOST_WIDE_INT len;
2365 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2369 struct store_by_pieces data;
2371 if (! MOVE_BY_PIECES_P (len, align))
2373 to = protect_from_queue (to, 1);
2374 data.constfun = constfun;
2375 data.constfundata = constfundata;
2378 store_by_pieces_1 (&data, align);
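/* Illustrative sketch (not part of the original source) of how
   store_by_pieces drives CONSTFUN: for each piece, conceptually,

       cst = (*constfun) (constfundata, offset, mode);
       emit a mode-sized move of CST to the piece at TO + offset;

   clear_by_pieces below, for instance, passes clear_by_pieces_1,
   which ignores its arguments and returns const0_rtx, so every piece
   stored is zero.  */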
2381 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2382 rtx with BLKmode). The caller must pass TO through protect_from_queue
2383 before calling. ALIGN is maximum alignment we can assume. */
2386 clear_by_pieces (to, len, align)
2388 unsigned HOST_WIDE_INT len;
2391 struct store_by_pieces data;
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2397 store_by_pieces_1 (&data, align);
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2404 clear_by_pieces_1 (data, offset, mode)
2405 PTR data ATTRIBUTE_UNUSED;
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2407 enum machine_mode mode ATTRIBUTE_UNUSED;
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
2413 Generate several move instructions to store LEN bytes of block TO. (A MEM
2414 rtx with BLKmode). The caller must pass TO through protect_from_queue
2415 before calling. ALIGN is maximum alignment we can assume. */
2418 store_by_pieces_1 (data, align)
2419 struct store_by_pieces *data;
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2424 enum machine_mode mode = VOIDmode, tmode;
2425 enum insn_code icode;
2428 data->to_addr = to_addr;
2430 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2431 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2433 data->explicit_inc_to = 0;
2435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2437 data->offset = data->len;
2439 /* If storing requires more than two move insns,
2440 copy addresses to registers (to make displacements shorter)
2441 and use post-increment if available. */
2442 if (!data->autinc_to
2443 && move_by_pieces_ninsns (data->len, align) > 2)
2445 /* Determine the main mode we'll be using. */
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2451 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2466 if ( !data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_addr_to_reg (to_addr);
2470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2472 align = MOVE_MAX * BITS_PER_UNIT;
2474 /* First store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2477 while (max_size > 1)
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2481 if (GET_MODE_SIZE (tmode) < max_size)
2484 if (mode == VOIDmode)
2487 icode = mov_optab->handlers[(int) mode].insn_code;
2488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2489 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491 max_size = GET_MODE_SIZE (mode);
2494 /* The code above should have handled everything. */
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
2504 store_by_pieces_2 (genfun, mode, data)
2505 rtx (*genfun) PARAMS ((rtx, ...));
2506 enum machine_mode mode;
2507 struct store_by_pieces *data;
2509 unsigned int size = GET_MODE_SIZE (mode);
2512 while (data->len >= size)
2515 data->offset -= size;
2517 if (data->autinc_to)
2519 to1 = replace_equiv_address (data->to, data->to_addr);
2520 to1 = adjust_address (to1, mode, 0);
2523 to1 = adjust_address (data->to, mode, data->offset);
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2535 if (! data->reverse)
2536 data->offset += size;
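/* Illustrative trace (not part of the original source): with
   explicit_inc_to == -1 (pre-decrement) and 4-byte SImode pieces,
   each iteration of the loop above emits

       (set (reg to_addr) (plus (reg to_addr) (const_int -4)))
       (set (mem:SI (reg to_addr)) cst)

   so the block is filled from the highest address downward.  */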
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes. */
2546 clear_storage (object, size)
2550 #ifdef TARGET_MEM_FUNCTIONS
2552 tree call_expr, arg_list;
2555 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2556 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2602 rtx last = get_last_insn ();
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2617 delete_insns_since (last);
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
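/* Illustrative failure mode (not part of the original source): had we
   kept the rtx returned by protect_from_queue and expanded the
   CALL_EXPR below first, an emit_queue run during argument setup
   could flush a pending postincrement, leaving that rtx referring to
   the already-incremented address.  Fresh pseudos are immune to
   emit_queue.  */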
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2668 /* This was copied from except.c; I don't know whether all of this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
2685 memset has three arguments, the first is a void * address, the
2686 second an integer with the initialization value, the last is a
2687 size_t byte count for the copy. */
2688 arg_list
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2691 object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
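/* Illustrative example (not part of the original source): clearing a
   64-byte BLKmode object this way expands, on a TARGET_MEM_FUNCTIONS
   target, the equivalent of

       memset (object, 0, 64);

   via the CALL_EXPR built above; other targets instead emit a library
   call to bzero (object, 64).  */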
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y = force_const_mem (mode, y);
2748 /* If X or Y are memory references, verify that their addresses are valid
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2763 if (mode == BLKmode)
2766 last_insn = emit_move_insn_1 (x, y);
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
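/* Example usage (illustrative only):

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));

   emits (set (reg:SI tmp) (const_int 42)); when a constant source had
   to be forced into memory, the REG_EQUAL note added above records
   the original constant on the move.  */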
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2798 * BITS_PER_UNIT),
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2801 0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the machine
2810 can push exactly, we need to use move instructions. */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2 = GET_MODE_SIZE (submode);
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2854 /* If this is a stack, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
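/* Illustrative trace (not part of the original source): moving a
   complex value whose component mode SUBMODE has a move pattern
   splits into

       (set (realpart x) (realpart y))
       (set (imagpart x) (imagpart y))

   via gen_realpart/gen_imagpart above; a stack push instead emits the
   two parts through the stack pointer, highpart first, so they end up
   in argument order.  */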
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3003 temp = stack_pointer_rtx;
3005 x = change_address (x, VOIDmode, temp);
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3039 if (xpart == 0 || ypart == 0)
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3044 last_insn = emit_move_insn (xpart, ypart);
3047 seq = gen_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
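/* Illustrative trace (not part of the original source): a DImode move
   on a 32-bit target with no movdi pattern falls into the multi-word
   loop above and becomes two SImode moves of the subwords obtained
   from operand_subword, preceded (before reload) by a
   (clobber (reg:DI x)) so that the lifetimes of the SUBREG pieces can
   be tracked.  */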
3069 /* Pushing data onto the stack. */
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3081 push_block (size, extra, below)
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3094 temp = copy_to_mode_reg (Pmode, size);
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3101 #ifndef STACK_GROWS_DOWNWARD
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3132 get_push_address (size)
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
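/* Worked example (illustrative): with STACK_PUSH_CODE == POST_DEC the
   stack pointer is decremented after the store, so the block just
   pushed begins at SP + SIZE; with POST_INC it begins at SP - SIZE;
   for the PRE_* codes the updated SP itself addresses the block.  */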
3147 #ifdef PUSH_ROUNDING
3149 /* Emit single push insn. */
3152 emit_single_push_insn (mode, x, type)
3154 enum machine_mode mode;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3165 a MEM representing the push operation to the move expander. */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3189 dest = gen_rtx_MEM (mode, dest_addr);
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3200 emit_move_insn (dest, x);
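/* Illustrative example (not part of the original source): pushing an
   SImode value on a STACK_GROWS_DOWNWARD target where
   PUSH_ROUNDING (4) == 4 yields

       (set (mem:SI (pre_dec (reg sp))) x)

   whereas a target that rounds a 2-byte HImode push up to 4 bytes
   gets the PRE_MODIFY address built above,

       (set (mem:HI (pre_modify (reg sp)
                                (plus (reg sp) (const_int -4)))) x).  */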
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3205 type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
3207 carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
3220 registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3241 enum machine_mode mode;
3250 int reg_parm_stack_space;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266 /* Invert direction if stack is post-decrement. */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3272 xinner = x = protect_from_queue (x, 0);
3274 if (mode == BLKmode)
3276 /* Copy a block into the stack, entirely or partially. */
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3292 xinner = adjust_address (xinner, BLKmode, used);
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3306 && GET_CODE (size) == CONST_INT
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3350 #endif /* PUSH_ROUNDING */
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3357 /* Deduct words put into registers from the size we must copy. */
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3373 temp = push_block (size, extra, where_pad == downward);
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3406 target = gen_rtx_MEM (BLKmode, temp);
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3418 set_mem_align (target, align);
3420 /* TEMP is the address of the block. Copy the data there. */
3421 if (GET_CODE (size) == CONST_INT
3422 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3424 move_by_pieces (target, xinner, INTVAL (size), align);
3429 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3430 enum machine_mode mode;
3432 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3434 mode = GET_MODE_WIDER_MODE (mode))
3436 enum insn_code code = movstr_optab[(int) mode];
3437 insn_operand_predicate_fn pred;
3439 if (code != CODE_FOR_nothing
3440 && ((GET_CODE (size) == CONST_INT
3441 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3442 <= (GET_MODE_MASK (mode) >> 1)))
3443 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3444 && (!(pred = insn_data[(int) code].operand[0].predicate)
3445 || ((*pred) (target, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[1].predicate)
3447 || ((*pred) (xinner, BLKmode)))
3448 && (!(pred = insn_data[(int) code].operand[3].predicate)
3449 || ((*pred) (opalign, VOIDmode))))
3451 rtx op2 = convert_to_mode (mode, size, 1);
3452 rtx last = get_last_insn ();
3455 pred = insn_data[(int) code].operand[2].predicate;
3456 if (pred != 0 && ! (*pred) (op2, mode))
3457 op2 = copy_to_mode_reg (mode, op2);
3459 pat = GEN_FCN ((int) code) (target, xinner,
3467 delete_insns_since (last);
3472 if (!ACCUMULATE_OUTGOING_ARGS)
3474 /* If the source is referenced relative to the stack pointer,
3475 copy it to another register to stabilize it. We do not need
3476 to do this if we know that we won't be changing sp. */
3478 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3479 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3480 temp = copy_to_reg (temp);
3483 /* Make inhibit_defer_pop nonzero around the library call
3484 to force it to pop the bcopy-arguments right away. */
3486 #ifdef TARGET_MEM_FUNCTIONS
3487 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3488 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3489 convert_to_mode (TYPE_MODE (sizetype),
3490 size, TREE_UNSIGNED (sizetype)),
3491 TYPE_MODE (sizetype));
3493 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3494 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3495 convert_to_mode (TYPE_MODE (integer_type_node),
3497 TREE_UNSIGNED (integer_type_node)),
3498 TYPE_MODE (integer_type_node));
3503 else if (partial > 0)
3505 /* Scalar partly in registers. */
3507 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3510 /* # words of start of argument
3511 that we must make space for but need not store. */
3512 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3513 int args_offset = INTVAL (args_so_far);
3516 /* Push padding now if padding above and stack grows down,
3517 or if padding below and stack grows up.
3518 But if space already allocated, this has already been done. */
3519 if (extra && args_addr == 0
3520 && where_pad != none && where_pad != stack_direction)
3521 anti_adjust_stack (GEN_INT (extra));
3523 /* If we make space by pushing it, we might as well push
3524 the real data. Otherwise, we can leave OFFSET nonzero
3525 and leave the space uninitialized. */
3529 /* Now NOT_STACK gets the number of words that we don't need to
3530 allocate on the stack. */
3531 not_stack = partial - offset;
3533 /* If the partial register-part of the arg counts in its stack size,
3534 skip the part of stack space corresponding to the registers.
3535 Otherwise, start copying to the beginning of the stack space,
3536 by setting SKIP to 0. */
3537 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3539 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3540 x = validize_mem (force_const_mem (mode, x));
3542 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3543 SUBREGs of such registers are not allowed. */
3544 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3545 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3546 x = copy_to_reg (x);
3548 /* Loop over all the words allocated on the stack for this arg. */
3549 /* We can do it by words, because any scalar bigger than a word
3550 has a size a multiple of a word. */
3551 #ifndef PUSH_ARGS_REVERSED
3552 for (i = not_stack; i < size; i++)
3553 #else
3554 for (i = size - 1; i >= not_stack; i--)
3555 #endif
3556 if (i >= not_stack + offset)
3557 emit_push_insn (operand_subword_force (x, i, mode),
3558 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3560 GEN_INT (args_offset + ((i - not_stack + skip)
3562 reg_parm_stack_space, alignment_pad);
3567 rtx target = NULL_RTX;
3570 /* Push padding now if padding above and stack grows down,
3571 or if padding below and stack grows up.
3572 But if space already allocated, this has already been done. */
3573 if (extra && args_addr == 0
3574 && where_pad != none && where_pad != stack_direction)
3575 anti_adjust_stack (GEN_INT (extra));
3577 #ifdef PUSH_ROUNDING
3578 if (args_addr == 0 && PUSH_ARGS)
3579 emit_single_push_insn (mode, x, type);
3583 if (GET_CODE (args_so_far) == CONST_INT)
3585 = memory_address (mode,
3586 plus_constant (args_addr,
3587 INTVAL (args_so_far)));
3589 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3592 dest = gen_rtx_MEM (mode, addr);
3595 set_mem_attributes (dest, type, 1);
3596 /* Function incoming arguments may overlap with sibling call
3597 outgoing arguments and we cannot allow reordering of reads
3598 from function arguments with stores to outgoing arguments
3599 of sibling calls. */
3600 set_mem_alias_set (dest, 0);
3603 emit_move_insn (dest, x);
3607 if (current_function_check_memory_usage && ! in_check_memory_usage)
3609 in_check_memory_usage = 1;
3611 target = get_push_address (GET_MODE_SIZE (mode));
3613 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3614 emit_library_call (chkr_copy_bitmap_libfunc,
3615 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3616 Pmode, XEXP (x, 0), Pmode,
3617 GEN_INT (GET_MODE_SIZE (mode)),
3618 TYPE_MODE (sizetype));
3620 emit_library_call (chkr_set_right_libfunc,
3621 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3622 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3623 TYPE_MODE (sizetype),
3624 GEN_INT (MEMORY_USE_RW),
3625 TYPE_MODE (integer_type_node));
3626 in_check_memory_usage = 0;
3631 /* If part should go in registers, copy that part
3632 into the appropriate registers. Do this now, at the end,
3633 since mem-to-mem copies above may do function calls. */
3634 if (partial > 0 && reg != 0)
3636 /* Handle calls that pass values in multiple non-contiguous locations.
3637 The Irix 6 ABI has examples of this. */
3638 if (GET_CODE (reg) == PARALLEL)
3639 emit_group_load (reg, x, -1, align); /* ??? size? */
3641 move_block_to_reg (REGNO (reg), x, partial, mode);
3644 if (extra && args_addr == 0 && where_pad == stack_direction)
3645 anti_adjust_stack (GEN_INT (extra));
3647 if (alignment_pad && args_addr == 0)
3648 anti_adjust_stack (alignment_pad);
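/* Illustrative scenario (not part of the original source): a 12-byte
   argument with PARTIAL == 2 on a 32-bit target keeps its first 8
   bytes for registers REG and REG+1 (loaded last, by the
   emit_group_load/move_block_to_reg code above, since the block
   copies may make function calls) and pushes only the remaining 4
   bytes; SKIP decides whether those bytes land past the stack space
   reserved for the register part (reg_parm_stack_space nonzero) or at
   the start of the argument's stack space.  */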
3651 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3659 /* Only registers can be subtargets. */
3660 || GET_CODE (x) != REG
3661 /* If the register is readonly, it can't be set more than once. */
3662 || RTX_UNCHANGING_P (x)
3663 /* Don't use hard regs to avoid extending their life. */
3664 || REGNO (x) < FIRST_PSEUDO_REGISTER
3665 /* Avoid subtargets inside loops,
3666 since they hide some invariant expressions. */
3667 || preserve_subexpressions_p ())
3671 /* Expand an assignment that stores the value of FROM into TO.
3672 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3673 (This may contain a QUEUED rtx;
3674 if the value is constant, this rtx is a constant.)
3675 Otherwise, the returned value is NULL_RTX.
3677 SUGGEST_REG is no longer actually used.
3678 It used to mean, copy the value through a register
3679 and return that register, if that is possible.
3680 We now use WANT_VALUE to decide whether to do this. */
3683 expand_assignment (to, from, want_value, suggest_reg)
3686 int suggest_reg ATTRIBUTE_UNUSED;
3691 /* Don't crash if the lhs of the assignment was erroneous. */
3693 if (TREE_CODE (to) == ERROR_MARK)
3695 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3696 return want_value ? result : NULL_RTX;
3699 /* Assignment of a structure component needs special treatment
3700 if the structure component's rtx is not simply a MEM.
3701 Assignment of an array element at a constant index, and assignment of
3702 an array element in an unaligned packed structure field, has the same
3705 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3706 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3708 enum machine_mode mode1;
3709 HOST_WIDE_INT bitsize, bitpos;
3714 unsigned int alignment;
3717 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3718 &unsignedp, &volatilep, &alignment);
3720 /* If we are going to use store_bit_field and extract_bit_field,
3721 make sure to_rtx will be safe for multiple use. */
3723 if (mode1 == VOIDmode && want_value)
3724 tem = stabilize_reference (tem);
3726 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3729 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3731 if (GET_CODE (to_rtx) != MEM)
3734 if (GET_MODE (offset_rtx) != ptr_mode)
3736 #ifdef POINTERS_EXTEND_UNSIGNED
3737 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3739 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3743 /* A constant address in TO_RTX can have VOIDmode; we must not try
3744 to call force_reg for that case, so avoid it. */
3745 if (GET_CODE (to_rtx) == MEM
3746 && GET_MODE (to_rtx) == BLKmode
3747 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3749 && (bitpos % bitsize) == 0
3750 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3751 && alignment == GET_MODE_ALIGNMENT (mode1))
3754 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3756 if (GET_CODE (XEXP (temp, 0)) == REG)
3759 to_rtx = (replace_equiv_address
3760 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3765 to_rtx = offset_address (to_rtx, offset_rtx,
3766 highest_pow2_factor (offset));
3771 if (GET_CODE (to_rtx) == MEM)
3773 /* When the offset is zero, to_rtx is the address of the
3774 structure we are storing into, and hence may be shared.
3775 We must make a new MEM before setting the volatile bit. */
3777 to_rtx = copy_rtx (to_rtx);
3779 MEM_VOLATILE_P (to_rtx) = 1;
3781 #if 0 /* This was turned off because, when a field is volatile
3782 in an object which is not volatile, the object may be in a register,
3783 and then we would abort over here. */
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
3793 to_rtx = copy_rtx (to_rtx);
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3798 /* Check the access. */
3799 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3804 enum machine_mode best_mode;
3806 best_mode = get_best_mode (bitsize, bitpos,
3807 TYPE_ALIGN (TREE_TYPE (tem)),
3809 if (best_mode == VOIDmode)
3812 best_mode_size = GET_MODE_BITSIZE (best_mode);
3813 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3814 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3815 size *= GET_MODE_SIZE (best_mode);
3817 /* Check the access right of the pointer. */
3818 in_check_memory_usage = 1;
3820 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3821 VOIDmode, 3, to_addr, Pmode,
3822 GEN_INT (size), TYPE_MODE (sizetype),
3823 GEN_INT (MEMORY_USE_WO),
3824 TYPE_MODE (integer_type_node));
3825 in_check_memory_usage = 0;
3828 /* If this is a varying-length object, we must get the address of
3829 the source and do an explicit block move. */
3832 unsigned int from_align;
3833 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3835 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3837 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3853 int_size_in_bytes (TREE_TYPE (tem)),
3854 get_alias_set (to));
3856 preserve_temp_slots (result);
3860 /* If the value is meaningful, convert RESULT to the proper mode.
3861 Otherwise, return nothing. */
3862 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3863 TYPE_MODE (TREE_TYPE (from)),
3865 TREE_UNSIGNED (TREE_TYPE (to)))
3870 /* If the rhs is a function call and its value is not an aggregate,
3871 call the function before we start to compute the lhs.
3872 This is needed for correct code for cases such as
3873 val = setjmp (buf) on machines where reference to val
3874 requires loading up part of an address in a separate insn.
3876 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3877 since it might be a promoted variable where the zero- or sign- extension
3878 needs to be done. Handling this in the normal way is safe because no
3879 computation is done before the call. */
3880 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3882 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3883 && GET_CODE (DECL_RTL (to)) == REG))
3888 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3890 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3892 /* Handle calls that return values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (to_rtx) == PARALLEL)
3895 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from));
3901 #ifdef POINTERS_EXTEND_UNSIGNED
3902 if (POINTER_TYPE_P (TREE_TYPE (to))
3903 && GET_MODE (to_rtx) != GET_MODE (value))
3904 value = convert_memory_address (GET_MODE (to_rtx), value);
3906 emit_move_insn (to_rtx, value);
3908 preserve_temp_slots (to_rtx);
3911 return want_value ? to_rtx : NULL_RTX;
3914 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3915 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3918 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3920 /* Don't move directly into a return register. */
3921 if (TREE_CODE (to) == RESULT_DECL
3922 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3927 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3929 if (GET_CODE (to_rtx) == PARALLEL)
3930 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3931 TYPE_ALIGN (TREE_TYPE (from)));
3933 emit_move_insn (to_rtx, temp);
3935 preserve_temp_slots (to_rtx);
3938 return want_value ? to_rtx : NULL_RTX;
3941 /* In case we are returning the contents of an object which overlaps
3942 the place the value is being stored, use a safe function when copying
3943 a value through a pointer into a structure value return block. */
3944 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3945 && current_function_returns_struct
3946 && !current_function_returns_pcc_struct)
3951 size = expr_size (from);
3952 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3953 EXPAND_MEMORY_USE_DONT);
3955 /* Copy the rights of the bitmap. */
3956 if (current_function_check_memory_usage)
3957 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3958 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3959 XEXP (from_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (sizetype),
3961 size, TREE_UNSIGNED (sizetype)),
3962 TYPE_MODE (sizetype));
3964 #ifdef TARGET_MEM_FUNCTIONS
3965 emit_library_call (memmove_libfunc, LCT_NORMAL,
3966 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3967 XEXP (from_rtx, 0), Pmode,
3968 convert_to_mode (TYPE_MODE (sizetype),
3969 size, TREE_UNSIGNED (sizetype)),
3970 TYPE_MODE (sizetype));
3972 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3973 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3974 XEXP (to_rtx, 0), Pmode,
3975 convert_to_mode (TYPE_MODE (integer_type_node),
3976 size, TREE_UNSIGNED (integer_type_node)),
3977 TYPE_MODE (integer_type_node));
3980 preserve_temp_slots (to_rtx);
3983 return want_value ? to_rtx : NULL_RTX;
3986 /* Compute FROM and store the value in the rtx we got. */
3989 result = store_expr (from, to_rtx, want_value);
3990 preserve_temp_slots (result);
3993 return want_value ? result : NULL_RTX;
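/* Example (illustrative only): for the C assignment  s.f = x  where F
   is a bit-field, the COMPONENT_REF branch above finds BITSIZE,
   BITPOS and MODE1 with get_inner_reference and stores through
   store_field; for  v = f ()  the CALL_EXPR branch expands the call
   before touching the lhs, as required for cases like
   val = setjmp (buf).  */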
3996 /* Generate code for computing expression EXP,
3997 and storing the value into TARGET.
3998 TARGET may contain a QUEUED rtx.
4000 If WANT_VALUE is nonzero, return a copy of the value
4001 not in TARGET, so that we can be sure to use the proper
4002 value in a containing expression even if TARGET has something
4003 else stored in it. If possible, we copy the value through a pseudo
4004 and return that pseudo. Or, if the value is constant, we try to
4005 return the constant. In some cases, we return a pseudo
4006 copied *from* TARGET.
4008 If the mode is BLKmode then we may return TARGET itself.
4009 It turns out that in BLKmode it doesn't cause a problem,
4010 because C has no operators that could combine two different
4011 assignments into the same BLKmode object with different values
4012 with no sequence point. Will other languages need this to
4013 be more thorough?
4015 If WANT_VALUE is 0, we return NULL, to make sure
4016 to catch quickly any cases where the caller uses the value
4017 and fails to set WANT_VALUE. */
4020 store_expr (exp, target, want_value)
4026 int dont_return_target = 0;
4027 int dont_store_target = 0;
4029 if (TREE_CODE (exp) == COMPOUND_EXPR)
4031 /* Perform first part of compound expression, then assign from second
4033 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4035 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4037 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4039 /* For a conditional expression, get a safe form of the target. Then
4040 test the condition, doing the appropriate assignment on either
4041 side. This avoids the creation of unnecessary temporaries.
4042 For non-BLKmode, it is more efficient not to do this. */
4044 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4047 target = protect_from_queue (target, 1);
4049 do_pending_stack_adjust ();
4051 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4052 start_cleanup_deferral ();
4053 store_expr (TREE_OPERAND (exp, 1), target, 0);
4054 end_cleanup_deferral ();
4056 emit_jump_insn (gen_jump (lab2));
4059 start_cleanup_deferral ();
4060 store_expr (TREE_OPERAND (exp, 2), target, 0);
4061 end_cleanup_deferral ();
4066 return want_value ? target : NULL_RTX;
4068 else if (queued_subexp_p (target))
4069 /* If target contains a postincrement, let's not risk
4070 using it as the place to generate the rhs. */
4072 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4074 /* Expand EXP into a new pseudo. */
4075 temp = gen_reg_rtx (GET_MODE (target));
4076 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4079 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4081 /* If target is volatile, ANSI requires accessing the value
4082 *from* the target, if it is accessed. So make that happen.
4083 In no case return the target itself. */
4084 if (! MEM_VOLATILE_P (target) && want_value)
4085 dont_return_target = 1;
4087 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4088 && GET_MODE (target) != BLKmode)
4089 /* If target is in memory and caller wants value in a register instead,
4090 arrange that. Pass TARGET as target for expand_expr so that,
4091 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4092 We know expand_expr will not use the target in that case.
4093 Don't do this if TARGET is volatile because we are supposed
4094 to write it and then read it. */
4096 temp = expand_expr (exp, target, GET_MODE (target), 0);
4097 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4099 /* If TEMP is already in the desired TARGET, only copy it from
4100 memory and don't store it there again. */
4102 || (rtx_equal_p (temp, target)
4103 && ! side_effects_p (temp) && ! side_effects_p (target)))
4104 dont_store_target = 1;
4105 temp = copy_to_reg (temp);
4107 dont_return_target = 1;
4109 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4110 /* If this is a scalar in a register that is stored in a wider mode
4111 than the declared mode, compute the result into its declared mode
4112 and then convert to the wider mode. Our value is the computed
4113 expression. */
4115 /* If we don't want a value, we can do the conversion inside EXP,
4116 which will often result in some optimizations. Do the conversion
4117 in two steps: first change the signedness, if needed, then
4118 the extend. But don't do this if the type of EXP is a subtype
4119 of something else since then the conversion might involve
4120 more than just converting modes. */
4121 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4122 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4124 if (TREE_UNSIGNED (TREE_TYPE (exp))
4125 != SUBREG_PROMOTED_UNSIGNED_P (target))
4128 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4132 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4133 SUBREG_PROMOTED_UNSIGNED_P (target)),
4137 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4139 /* If TEMP is a volatile MEM and we want a result value, make
4140 the access now so it gets done only once. Likewise if
4141 it contains TARGET. */
4142 if (GET_CODE (temp) == MEM && want_value
4143 && (MEM_VOLATILE_P (temp)
4144 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4145 temp = copy_to_reg (temp);
4147 /* If TEMP is a VOIDmode constant, use convert_modes to make
4148 sure that we properly convert it. */
4149 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4151 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4152 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4153 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4154 GET_MODE (target), temp,
4155 SUBREG_PROMOTED_UNSIGNED_P (target));
4158 convert_move (SUBREG_REG (target), temp,
4159 SUBREG_PROMOTED_UNSIGNED_P (target));
4161 /* If we promoted a constant, change the mode back down to match
4162 target. Otherwise, the caller might get confused by a result whose
4163 mode is larger than expected. */
4165 if (want_value && GET_MODE (temp) != GET_MODE (target)
4166 && GET_MODE (temp) != VOIDmode)
4168 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4169 SUBREG_PROMOTED_VAR_P (temp) = 1;
4170 SUBREG_PROMOTED_UNSIGNED_P (temp)
4171 = SUBREG_PROMOTED_UNSIGNED_P (target);
4174 return want_value ? temp : NULL_RTX;
4178 temp = expand_expr (exp, target, GET_MODE (target), 0);
4179 /* Return TARGET if it's a specified hardware register.
4180 If TARGET is a volatile mem ref, either return TARGET
4181 or return a reg copied *from* TARGET; ANSI requires this.
4183 Otherwise, if TEMP is not TARGET, return TEMP
4184 if it is constant (for efficiency),
4185 or if we really want the correct value. */
4186 if (!(target && GET_CODE (target) == REG
4187 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4188 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4189 && ! rtx_equal_p (temp, target)
4190 && (CONSTANT_P (temp) || want_value))
4191 dont_return_target = 1;
4194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4195 the same as that of TARGET, adjust the constant. This is needed, for
4196 example, in case it is a CONST_DOUBLE and we want only a word-sized
4197 value. */
4198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4199 && TREE_CODE (exp) != ERROR_MARK
4200 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4201 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4202 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4204 if (current_function_check_memory_usage
4205 && GET_CODE (target) == MEM
4206 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4208 in_check_memory_usage = 1;
4209 if (GET_CODE (temp) == MEM)
4210 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4211 VOIDmode, 3, XEXP (target, 0), Pmode,
4212 XEXP (temp, 0), Pmode,
4213 expr_size (exp), TYPE_MODE (sizetype));
4215 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 expr_size (exp), TYPE_MODE (sizetype),
4218 GEN_INT (MEMORY_USE_WO),
4219 TYPE_MODE (integer_type_node));
4220 in_check_memory_usage = 0;
4223 /* If value was not generated in the target, store it there.
4224 Convert the value to TARGET's type first if necessary. */
4225 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4226 one or both of them are volatile memory refs, we have to distinguish
4228 - expand_expr has used TARGET. In this case, we must not generate
4229 another copy. This can be detected by TARGET being equal according
4230 to == .
4231 - expand_expr has not used TARGET - that means that the source just
4232 happens to have the same RTX form. Since temp will have been created
4233 by expand_expr, it will compare unequal according to == .
4234 We must generate a copy in this case, to reach the correct number
4235 of volatile memory references. */
4237 if ((! rtx_equal_p (temp, target)
4238 || (temp != target && (side_effects_p (temp)
4239 || side_effects_p (target))))
4240 && TREE_CODE (exp) != ERROR_MARK
4241 && ! dont_store_target)
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
4245 && GET_MODE (temp) != VOIDmode)
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4257 convert_move (target, temp, unsignedp);
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 /* Handle copying a string constant into an array.
4263 The string constant may be shorter than the array.
4264 So copy just the string's actual length, and clear the rest. */
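/* E.g. (illustrative): for `char buf[10] = "hi"', expr_size is 10 and
   TREE_STRING_LENGTH is 3 (counting the terminating null), so the
   code below block-copies 3 bytes and clears the remaining 7.  */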
4268 /* Get the size of the data type of the string,
4269 which is actually the size of the target. */
4270 size = expr_size (exp);
4271 if (GET_CODE (size) == CONST_INT
4272 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4273 emit_block_move (target, temp, size);
4276 /* Compute the size of the data to copy from the string. */
4278 = size_binop (MIN_EXPR,
4279 make_tree (sizetype, size),
4280 size_int (TREE_STRING_LENGTH (exp)));
4281 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4285 /* Copy that much. */
4286 emit_block_move (target, temp, copy_size_rtx);
4288 /* Figure out how much is left in TARGET that we have to clear.
4289 Do all calculations in ptr_mode. */
4291 addr = XEXP (target, 0);
4292 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4294 if (GET_CODE (copy_size_rtx) == CONST_INT)
4296 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4297 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4301 addr = force_reg (ptr_mode, addr);
4302 addr = expand_binop (ptr_mode, add_optab, addr,
4303 copy_size_rtx, NULL_RTX, 0,
4306 size = expand_binop (ptr_mode, sub_optab, size,
4307 copy_size_rtx, NULL_RTX, 0,
4310 label = gen_label_rtx ();
4311 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4312 GET_MODE (size), 0, 0, label);
4315 if (size != const0_rtx)
4317 rtx dest = gen_rtx_MEM (BLKmode, addr);
4319 MEM_COPY_ATTRIBUTES (dest, target);
4321 /* Be sure we can write on ADDR. */
4322 in_check_memory_usage = 1;
4323 if (current_function_check_memory_usage)
4324 emit_library_call (chkr_check_addr_libfunc,
4325 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4327 size, TYPE_MODE (sizetype),
4328 GEN_INT (MEMORY_USE_WO),
4329 TYPE_MODE (integer_type_node));
4330 in_check_memory_usage = 0;
4331 clear_storage (dest, size);
4338 /* Handle calls that return values in multiple non-contiguous locations.
4339 The Irix 6 ABI has examples of this. */
4340 else if (GET_CODE (target) == PARALLEL)
4341 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4342 TYPE_ALIGN (TREE_TYPE (exp)));
4343 else if (GET_MODE (temp) == BLKmode)
4344 emit_block_move (target, temp, expr_size (exp));
4346 emit_move_insn (target, temp);
4349 /* If we don't want a value, return NULL_RTX. */
4353 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4354 ??? The latter test doesn't seem to make sense. */
4355 else if (dont_return_target && GET_CODE (temp) != MEM)
4358 /* Return TARGET itself if it is a hard register. */
4359 else if (want_value && GET_MODE (target) != BLKmode
4360 && ! (GET_CODE (target) == REG
4361 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4362 return copy_to_reg (target);
4368 /* Return 1 if EXP just contains zeros. */
4376 switch (TREE_CODE (exp))
4380 case NON_LVALUE_EXPR:
4381 return is_zeros_p (TREE_OPERAND (exp, 0));
4384 return integer_zerop (exp);
4388 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4391 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
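/* Illustrative examples (hypothetical): { 0, 0, 0, 5 } gives
   zeros == 3 and elts == 4, and 4 * 3 >= 3 * 4 holds, so it counts
   as mostly zero; { 0, 0, 5, 5 } gives 4 * 2 >= 3 * 4, which fails.  */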
4410 mostly_zeros_p (exp)
4413 if (TREE_CODE (exp) == CONSTRUCTOR)
4415 int elts = 0, zeros = 0;
4416 tree elt = CONSTRUCTOR_ELTS (exp);
4417 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 /* If there are no ranges of true bits, it is all zero. */
4420 return elt == NULL_TREE;
4422 for (; elt; elt = TREE_CHAIN (elt))
4424 /* We do not handle the case where the index is a RANGE_EXPR,
4425 so the statistic will be somewhat inaccurate.
4426 We do make a more accurate count in store_constructor itself,
4427 so since this function is only used for nested array elements,
4428 this should be close enough. */
4429 if (mostly_zeros_p (TREE_VALUE (elt)))
4434 return 4 * zeros >= 3 * elts;
4437 return is_zeros_p (exp);
4440 /* Helper function for store_constructor.
4441 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4442 TYPE is the type of the CONSTRUCTOR, not the element type.
4443 ALIGN and CLEARED are as for store_constructor.
4444 ALIAS_SET is the alias set to use for any stores.
4446 This provides a recursive shortcut back to store_constructor when it isn't
4447 necessary to go through store_field. This is so that we can pass through
4448 the cleared field to let store_constructor know that we may not have to
4449 clear a substructure if the outer structure has already been cleared. */
4452 store_constructor_field (target, bitsize, bitpos,
4453 mode, exp, type, align, cleared, alias_set)
4455 unsigned HOST_WIDE_INT bitsize;
4456 HOST_WIDE_INT bitpos;
4457 enum machine_mode mode;
4463 if (TREE_CODE (exp) == CONSTRUCTOR
4464 && bitpos % BITS_PER_UNIT == 0
4465 /* If we have a non-zero bitpos for a register target, then we just
4466 let store_field do the bitfield handling. This is unlikely to
4467 generate unnecessary clear instructions anyway. */
4468 && (bitpos == 0 || GET_CODE (target) == MEM))
4472 = adjust_address (target,
4473 GET_MODE (target) == BLKmode
4475 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4476 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4479 /* Show the alignment may no longer be what it was and update the alias
4480 set, if required. */
4482 align = MIN (align, (unsigned int) bitpos & - bitpos);
4483 if (GET_CODE (target) == MEM)
4484 set_mem_alias_set (target, alias_set);
4486 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4489 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4490 int_size_in_bytes (type), alias_set);
4493 /* Store the value of constructor EXP into the rtx TARGET.
4494 TARGET is either a REG or a MEM.
4495 ALIGN is the maximum known alignment for TARGET.
4496 CLEARED is true if TARGET is known to have been zeroed.
4497 SIZE is the number of bytes of TARGET we are allowed to modify: this
4498 may not be the same as the size of EXP if we are assigning to a field
4499 which has been packed to exclude padding bits. */
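/* An illustrative sketch (hypothetical): for
   `struct s { int a, b, c, d; } x = { 0, 0, 7, 0 };'
   mostly_zeros_p holds, so the code below clears all of X first and
   then stores only the nonzero field C, skipping the elements for
   which is_zeros_p is true.  */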
4502 store_constructor (exp, target, align, cleared, size)
4509 tree type = TREE_TYPE (exp);
4510 #ifdef WORD_REGISTER_OPERATIONS
4511 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4514 /* We know our target cannot conflict, since safe_from_p has been called. */
4516 /* Don't try copying piece by piece into a hard register
4517 since that is vulnerable to being clobbered by EXP.
4518 Instead, construct in a pseudo register and then copy it all. */
4519 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4521 rtx temp = gen_reg_rtx (GET_MODE (target));
4522 store_constructor (exp, temp, align, cleared, size);
4523 emit_move_insn (target, temp);
4528 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4529 || TREE_CODE (type) == QUAL_UNION_TYPE)
4533 /* Inform later passes that the whole union value is dead. */
4534 if ((TREE_CODE (type) == UNION_TYPE
4535 || TREE_CODE (type) == QUAL_UNION_TYPE)
4538 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4540 /* If the constructor is empty, clear the union. */
4541 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4542 clear_storage (target, expr_size (exp));
4545 /* If we are building a static constructor into a register,
4546 set the initial value as zero so we can fold the value into
4547 a constant. But if more than one register is involved,
4548 this probably loses. */
4549 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4550 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4553 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4558 /* If the constructor has fewer fields than the structure
4559 or if we are initializing the structure to mostly zeros,
4560 clear the whole structure first. Don't do this if TARGET is a
4561 register whose mode size isn't equal to SIZE since clear_storage
4562 can't handle this case. */
4564 && ((list_length (CONSTRUCTOR_ELTS (exp))
4565 != fields_length (type))
4566 || mostly_zeros_p (exp))
4567 && (GET_CODE (target) != REG
4568 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4571 clear_storage (target, GEN_INT (size));
4576 /* Inform later passes that the old value is dead. */
4577 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4579 /* Store each element of the constructor into
4580 the corresponding field of TARGET. */
4582 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4584 tree field = TREE_PURPOSE (elt);
4585 #ifdef WORD_REGISTER_OPERATIONS
4586 tree value = TREE_VALUE (elt);
4588 enum machine_mode mode;
4589 HOST_WIDE_INT bitsize;
4590 HOST_WIDE_INT bitpos = 0;
4593 rtx to_rtx = target;
4595 /* Just ignore missing fields.
4596 We cleared the whole structure, above,
4597 if any fields are missing. */
4601 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4604 if (host_integerp (DECL_SIZE (field), 1))
4605 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4609 unsignedp = TREE_UNSIGNED (field);
4610 mode = DECL_MODE (field);
4611 if (DECL_BIT_FIELD (field))
4614 offset = DECL_FIELD_OFFSET (field);
4615 if (host_integerp (offset, 0)
4616 && host_integerp (bit_position (field), 0))
4618 bitpos = int_bit_position (field);
4622 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4628 if (contains_placeholder_p (offset))
4629 offset = build (WITH_RECORD_EXPR, sizetype,
4630 offset, make_tree (TREE_TYPE (exp), target));
4632 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4633 if (GET_CODE (to_rtx) != MEM)
4636 if (GET_MODE (offset_rtx) != ptr_mode)
4637 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4639 #ifdef POINTERS_EXTEND_UNSIGNED
4640 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4643 to_rtx = offset_address (to_rtx, offset_rtx,
4644 highest_pow2_factor (offset));
4646 align = DECL_OFFSET_ALIGN (field);
4649 if (TREE_READONLY (field))
4651 if (GET_CODE (to_rtx) == MEM)
4652 to_rtx = copy_rtx (to_rtx);
4654 RTX_UNCHANGING_P (to_rtx) = 1;
4657 #ifdef WORD_REGISTER_OPERATIONS
4658 /* If this initializes a field that is smaller than a word, at the
4659 start of a word, try to widen it to a full word.
4660 This special case allows us to output C++ member function
4661 initializations in a form that the optimizers can understand. */
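/* E.g. (illustrative): storing the constant 0x12 into an 8-bit field
   at bit 0 of a 32-bit register target widens the constant to a full
   SImode word; on a big-endian machine it is first shifted left by
   BITS_PER_WORD - bitsize == 24 so the bits land where a full-word
   store would put them.  */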
4662 if (GET_CODE (target) == REG
4663 && bitsize < BITS_PER_WORD
4664 && bitpos % BITS_PER_WORD == 0
4665 && GET_MODE_CLASS (mode) == MODE_INT
4666 && TREE_CODE (value) == INTEGER_CST
4668 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4670 tree type = TREE_TYPE (value);
4671 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4673 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4674 value = convert (type, value);
4676 if (BYTES_BIG_ENDIAN)
4678 = fold (build (LSHIFT_EXPR, type, value,
4679 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4680 bitsize = BITS_PER_WORD;
4684 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4685 TREE_VALUE (elt), type, align, cleared,
4686 (DECL_NONADDRESSABLE_P (field)
4687 && GET_CODE (to_rtx) == MEM)
4688 ? MEM_ALIAS_SET (to_rtx)
4689 : get_alias_set (TREE_TYPE (field)));
4692 else if (TREE_CODE (type) == ARRAY_TYPE)
4697 tree domain = TYPE_DOMAIN (type);
4698 tree elttype = TREE_TYPE (type);
4699 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4700 && TYPE_MAX_VALUE (domain)
4701 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4702 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4703 HOST_WIDE_INT minelt = 0;
4704 HOST_WIDE_INT maxelt = 0;
4706 /* If we have constant bounds for the range of the type, get them. */
4709 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4710 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4713 /* If the constructor has fewer elements than the array,
4714 clear the whole array first. Similarly if this is
4715 a static constructor of a non-BLKmode object. */
4716 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4720 HOST_WIDE_INT count = 0, zero_count = 0;
4721 need_to_clear = ! const_bounds_p;
4723 /* This loop is a more accurate version of the loop in
4724 mostly_zeros_p (it handles RANGE_EXPR in an index).
4725 It is also needed to check for missing elements. */
4726 for (elt = CONSTRUCTOR_ELTS (exp);
4727 elt != NULL_TREE && ! need_to_clear;
4728 elt = TREE_CHAIN (elt))
4730 tree index = TREE_PURPOSE (elt);
4731 HOST_WIDE_INT this_node_count;
4733 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4735 tree lo_index = TREE_OPERAND (index, 0);
4736 tree hi_index = TREE_OPERAND (index, 1);
4738 if (! host_integerp (lo_index, 1)
4739 || ! host_integerp (hi_index, 1))
4745 this_node_count = (tree_low_cst (hi_index, 1)
4746 - tree_low_cst (lo_index, 1) + 1);
4749 this_node_count = 1;
4751 count += this_node_count;
4752 if (mostly_zeros_p (TREE_VALUE (elt)))
4753 zero_count += this_node_count;
4756 /* Clear the entire array first if there are any missing elements,
4757 or if the incidence of zero elements is >= 75%. */
4759 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4763 if (need_to_clear && size > 0)
4766 clear_storage (target, GEN_INT (size));
4769 else if (REG_P (target))
4770 /* Inform later passes that the old value is dead. */
4771 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4773 /* Store each element of the constructor into
4774 the corresponding element of TARGET, determined
4775 by counting the elements. */
4776 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4778 elt = TREE_CHAIN (elt), i++)
4780 enum machine_mode mode;
4781 HOST_WIDE_INT bitsize;
4782 HOST_WIDE_INT bitpos;
4784 tree value = TREE_VALUE (elt);
4785 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4786 tree index = TREE_PURPOSE (elt);
4787 rtx xtarget = target;
4789 if (cleared && is_zeros_p (value))
4792 unsignedp = TREE_UNSIGNED (elttype);
4793 mode = TYPE_MODE (elttype);
4794 if (mode == BLKmode)
4795 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4796 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4799 bitsize = GET_MODE_BITSIZE (mode);
4801 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4803 tree lo_index = TREE_OPERAND (index, 0);
4804 tree hi_index = TREE_OPERAND (index, 1);
4805 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4806 struct nesting *loop;
4807 HOST_WIDE_INT lo, hi, count;
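/* E.g. (illustrative, GNU C range designators): for
   `int a[6] = { [2 ... 4] = 9 };' the index is a RANGE_EXPR with
   lo_index 2 and hi_index 4; since the range is constant and small,
   it is unrolled below into three separate stores.  */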
4810 /* If the range is constant and "small", unroll the loop. */
4812 && host_integerp (lo_index, 0)
4813 && host_integerp (hi_index, 0)
4814 && (lo = tree_low_cst (lo_index, 0),
4815 hi = tree_low_cst (hi_index, 0),
4816 count = hi - lo + 1,
4817 (GET_CODE (target) != MEM
4819 || (host_integerp (TYPE_SIZE (elttype), 1)
4820 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4823 lo -= minelt; hi -= minelt;
4824 for (; lo <= hi; lo++)
4826 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4827 store_constructor_field
4828 (target, bitsize, bitpos, mode, value, type, align,
4830 TYPE_NONALIASED_COMPONENT (type)
4831 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4836 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4837 loop_top = gen_label_rtx ();
4838 loop_end = gen_label_rtx ();
4840 unsignedp = TREE_UNSIGNED (domain);
4842 index = build_decl (VAR_DECL, NULL_TREE, domain);
4845 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4847 SET_DECL_RTL (index, index_r);
4848 if (TREE_CODE (value) == SAVE_EXPR
4849 && SAVE_EXPR_RTL (value) == 0)
4851 /* Make sure value gets expanded once before the loop. */
4853 expand_expr (value, const0_rtx, VOIDmode, 0);
4856 store_expr (lo_index, index_r, 0);
4857 loop = expand_start_loop (0);
4859 /* Assign value to element index. */
4861 = convert (ssizetype,
4862 fold (build (MINUS_EXPR, TREE_TYPE (index),
4863 index, TYPE_MIN_VALUE (domain))));
4864 position = size_binop (MULT_EXPR, position,
4866 TYPE_SIZE_UNIT (elttype)));
4868 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4869 xtarget = offset_address (target, pos_rtx,
4870 highest_pow2_factor (position));
4871 xtarget = adjust_address (xtarget, mode, 0);
4872 if (TREE_CODE (value) == CONSTRUCTOR)
4873 store_constructor (value, xtarget, align, cleared,
4874 bitsize / BITS_PER_UNIT);
4876 store_expr (value, xtarget, 0);
4878 expand_exit_loop_if_false (loop,
4879 build (LT_EXPR, integer_type_node,
4882 expand_increment (build (PREINCREMENT_EXPR,
4884 index, integer_one_node), 0, 0);
4886 emit_label (loop_end);
4889 else if ((index != 0 && ! host_integerp (index, 0))
4890 || ! host_integerp (TYPE_SIZE (elttype), 1))
4895 index = ssize_int (1);
4898 index = convert (ssizetype,
4899 fold (build (MINUS_EXPR, index,
4900 TYPE_MIN_VALUE (domain))));
4902 position = size_binop (MULT_EXPR, index,
4904 TYPE_SIZE_UNIT (elttype)));
4905 xtarget = offset_address (target,
4906 expand_expr (position, 0, VOIDmode, 0),
4907 highest_pow2_factor (position));
4908 xtarget = adjust_address (xtarget, mode, 0);
4909 store_expr (value, xtarget, 0);
4914 bitpos = ((tree_low_cst (index, 0) - minelt)
4915 * tree_low_cst (TYPE_SIZE (elttype), 1));
4917 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4919 store_constructor_field (target, bitsize, bitpos, mode, value,
4920 type, align, cleared,
4921 TYPE_NONALIASED_COMPONENT (type)
4922 && GET_CODE (target) == MEM
4923 ? MEM_ALIAS_SET (target) :
4924 get_alias_set (elttype));
4930 /* Set constructor assignments. */
4931 else if (TREE_CODE (type) == SET_TYPE)
4933 tree elt = CONSTRUCTOR_ELTS (exp);
4934 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4935 tree domain = TYPE_DOMAIN (type);
4936 tree domain_min, domain_max, bitlength;
4938 /* The default implementation strategy is to extract the constant
4939 parts of the constructor, use that to initialize the target,
4940 and then "or" in whatever non-constant ranges we need in addition.
4942 If a large set is all zero or all ones, it is
4943 probably better to set it using memset (if available) or bzero.
4944 Also, if a large set has just a single range, it may also be
4945 better to first clear the whole set (using
4946 bzero/memset) and then set the bits we want. */
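/* An illustrative case (hypothetical): a set over 0..31 containing
   { 1, 5, 6, 7 } packs, without BYTES_BIG_ENDIAN, into the constant
   word (1<<1)|(1<<5)|(1<<6)|(1<<7) == 0xe2, emitted below with a
   single move; with BYTES_BIG_ENDIAN each bit instead lands at
   set_word_size - 1 - bit_pos.  */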
4948 /* Check for all zeros. */
4949 if (elt == NULL_TREE && size > 0)
4952 clear_storage (target, GEN_INT (size));
4956 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4957 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4958 bitlength = size_binop (PLUS_EXPR,
4959 size_diffop (domain_max, domain_min),
4962 nbits = tree_low_cst (bitlength, 1);
4964 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4965 are "complicated" (more than one range), initialize (the
4966 constant parts) by copying from a constant. */
4967 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4968 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4970 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4971 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4972 char *bit_buffer = (char *) alloca (nbits);
4973 HOST_WIDE_INT word = 0;
4974 unsigned int bit_pos = 0;
4975 unsigned int ibit = 0;
4976 unsigned int offset = 0; /* In bytes from beginning of set. */
4978 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4981 if (bit_buffer[ibit])
4983 if (BYTES_BIG_ENDIAN)
4984 word |= (1 << (set_word_size - 1 - bit_pos));
4986 word |= 1 << bit_pos;
4990 if (bit_pos >= set_word_size || ibit == nbits)
4992 if (word != 0 || ! cleared)
4994 rtx datum = GEN_INT (word);
4997 /* The assumption here is that it is safe to use
4998 XEXP if the set is multi-word, but not if
4999 it's single-word. */
5000 if (GET_CODE (target) == MEM)
5001 to_rtx = adjust_address (target, mode, offset);
5002 else if (offset == 0)
5006 emit_move_insn (to_rtx, datum);
5013 offset += set_word_size / BITS_PER_UNIT;
5018 /* Don't bother clearing storage if the set is all ones. */
5019 if (TREE_CHAIN (elt) != NULL_TREE
5020 || (TREE_PURPOSE (elt) == NULL_TREE
5022 : ( ! host_integerp (TREE_VALUE (elt), 0)
5023 || ! host_integerp (TREE_PURPOSE (elt), 0)
5024 || (tree_low_cst (TREE_VALUE (elt), 0)
5025 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5026 != (HOST_WIDE_INT) nbits))))
5027 clear_storage (target, expr_size (exp));
5029 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5031 /* Start of range of element or NULL. */
5032 tree startbit = TREE_PURPOSE (elt);
5033 /* End of range of element, or element value. */
5034 tree endbit = TREE_VALUE (elt);
5035 #ifdef TARGET_MEM_FUNCTIONS
5036 HOST_WIDE_INT startb, endb;
5038 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5040 bitlength_rtx = expand_expr (bitlength,
5041 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5043 /* Handle non-range tuple element like [ expr ]. */
5044 if (startbit == NULL_TREE)
5046 startbit = save_expr (endbit);
5050 startbit = convert (sizetype, startbit);
5051 endbit = convert (sizetype, endbit);
5052 if (! integer_zerop (domain_min))
5054 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5055 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5057 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5059 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5060 EXPAND_CONST_ADDRESS);
5066 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5069 emit_move_insn (targetx, target);
5072 else if (GET_CODE (target) == MEM)
5077 #ifdef TARGET_MEM_FUNCTIONS
5078 /* Optimization: If startbit and endbit are
5079 constants divisible by BITS_PER_UNIT,
5080 call memset instead. */
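/* E.g. (illustrative): for the range [8 .. 23] on an 8-bit-byte
   target, startb == 8 and endb == 24 are both divisible by
   BITS_PER_UNIT, so this becomes memset (addr + 1, -1, 2).  */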
5081 if (TREE_CODE (startbit) == INTEGER_CST
5082 && TREE_CODE (endbit) == INTEGER_CST
5083 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5084 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5086 emit_library_call (memset_libfunc, LCT_NORMAL,
5088 plus_constant (XEXP (targetx, 0),
5089 startb / BITS_PER_UNIT),
5091 constm1_rtx, TYPE_MODE (integer_type_node),
5092 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5093 TYPE_MODE (sizetype));
5097 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5098 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5099 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5100 startbit_rtx, TYPE_MODE (sizetype),
5101 endbit_rtx, TYPE_MODE (sizetype));
5104 emit_move_insn (target, targetx);
5112 /* Store the value of EXP (an expression tree)
5113 into a subfield of TARGET which has mode MODE and occupies
5114 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5115 If MODE is VOIDmode, it means that we are storing into a bit-field.
5117 If VALUE_MODE is VOIDmode, return nothing in particular.
5118 UNSIGNEDP is not used in this case.
5120 Otherwise, return an rtx for the value stored. This rtx
5121 has mode VALUE_MODE if that is convenient to do.
5122 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5124 ALIGN is the alignment that TARGET is known to have.
5125 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5127 ALIAS_SET is the alias set for the destination. This value will
5128 (in general) be different from that for TARGET, since TARGET is a
5129 reference to the containing structure. */
5132 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5133 unsignedp, align, total_size, alias_set)
5135 HOST_WIDE_INT bitsize;
5136 HOST_WIDE_INT bitpos;
5137 enum machine_mode mode;
5139 enum machine_mode value_mode;
5142 HOST_WIDE_INT total_size;
5145 HOST_WIDE_INT width_mask = 0;
5147 if (TREE_CODE (exp) == ERROR_MARK)
5150 /* If we have nothing to store, do nothing unless the expression has side effects. */
5153 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5155 if (bitsize < HOST_BITS_PER_WIDE_INT)
5156 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5158 /* If we are storing into an unaligned field of an aligned union that is
5159 in a register, we may have the mode of TARGET being an integer mode but
5160 MODE == BLKmode. In that case, get an aligned object whose size and
5161 alignment are the same as TARGET and store TARGET into it (we can avoid
5162 the store if the field being stored is the entire width of TARGET). Then
5163 call ourselves recursively to store the field into a BLKmode version of
5164 that object. Finally, load from the object into TARGET. This is not
5165 very efficient in general, but should only be slightly more expensive
5166 than the otherwise-required unaligned accesses. Perhaps this can be
5167 cleaned up later. */
5170 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5174 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5177 rtx blk_object = copy_rtx (object);
5179 PUT_MODE (blk_object, BLKmode);
5181 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5182 emit_move_insn (object, target);
5184 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5185 align, total_size, alias_set);
5187 /* Even though we aren't returning target, we need to
5188 give it the updated value. */
5189 emit_move_insn (target, object);
5194 if (GET_CODE (target) == CONCAT)
5196 /* We're storing into a struct containing a single __complex. */
5200 return store_expr (exp, target, 0);
5203 /* If the structure is in a register or if the component
5204 is a bit field, we cannot use addressing to access it.
5205 Use bit-field techniques or SUBREG to store in it. */
5207 if (mode == VOIDmode
5208 || (mode != BLKmode && ! direct_store[(int) mode]
5209 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5211 || GET_CODE (target) == REG
5212 || GET_CODE (target) == SUBREG
5213 /* If the field isn't aligned enough to store as an ordinary memref,
5214 store it as a bit field. */
5215 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5216 && (align < GET_MODE_ALIGNMENT (mode)
5217 || bitpos % GET_MODE_ALIGNMENT (mode)))
5218 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5219 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5220 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5221 /* If the RHS and field are a constant size and the size of the
5222 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5225 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5226 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5228 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5230 /* If BITSIZE is narrower than the size of the type of EXP
5231 we will be narrowing TEMP. Normally, what's wanted are the
5232 low-order bits. However, if EXP's type is a record and this is a
5233 big-endian machine, we want the upper BITSIZE bits.
5234 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5235 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5236 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5237 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5238 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5242 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5244 if (mode != VOIDmode && mode != BLKmode
5245 && mode != TYPE_MODE (TREE_TYPE (exp)))
5246 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5248 /* If the modes of TARGET and TEMP are both BLKmode, both
5249 must be in memory and BITPOS must be aligned on a byte
5250 boundary. If so, we simply do a block copy. */
5251 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5253 unsigned int exp_align = expr_align (exp);
5255 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5256 || bitpos % BITS_PER_UNIT != 0)
5259 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5261 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5262 align = MIN (exp_align, align);
5264 /* Find an alignment that is consistent with the bit position. */
5265 while ((bitpos % align) != 0)
5268 emit_block_move (target, temp,
5269 bitsize == -1 ? expr_size (exp)
5270 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5273 return value_mode == VOIDmode ? const0_rtx : target;
5276 /* Store the value in the bitfield. */
5277 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5278 if (value_mode != VOIDmode)
5280 /* The caller wants an rtx for the value. */
5281 /* If possible, avoid refetching from the bitfield itself. */
5283 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5286 enum machine_mode tmode;
5289 return expand_and (temp,
5293 GET_MODE (temp) == VOIDmode
5295 : GET_MODE (temp))), NULL_RTX);
5296 tmode = GET_MODE (temp);
5297 if (tmode == VOIDmode)
5299 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5300 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
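/* The shift pair above sign-extends the field: e.g. (illustrative),
   with bitsize == 5 and tmode SImode, count == 27, so the value 0x1f
   becomes 0xf8000000 after the left shift and -1 after the
   arithmetic right shift.  */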
5303 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5304 NULL_RTX, value_mode, 0, align,
5311 rtx addr = XEXP (target, 0);
5314 /* If a value is wanted, it must be the lhs;
5315 so make the address stable for multiple use. */
5317 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5318 && ! CONSTANT_ADDRESS_P (addr)
5319 /* A frame-pointer reference is already stable. */
5320 && ! (GET_CODE (addr) == PLUS
5321 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5322 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5323 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5324 target = replace_equiv_address (target, copy_to_reg (addr));
5326 /* Now build a reference to just the desired component. */
5328 to_rtx = copy_rtx (adjust_address (target, mode,
5329 bitpos / BITS_PER_UNIT));
5331 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5332 set_mem_alias_set (to_rtx, alias_set);
5334 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5338 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5339 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5340 codes and find the ultimate containing object, which we return.
5342 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5343 bit position, and *PUNSIGNEDP to the signedness of the field.
5344 If the position of the field is variable, we store a tree
5345 giving the variable offset (in units) in *POFFSET.
5346 This offset is in addition to the bit position.
5347 If the position is not variable, we store 0 in *POFFSET.
5348 We set *PALIGNMENT to the alignment of the address that will be
5349 computed. This is the alignment of the thing we return if *POFFSET
5350 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5352 If any of the extraction expressions is volatile,
5353 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5355 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5356 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5359 If the field describes a variable-sized object, *PMODE is set to
5360 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5361 this case, but the address of the object can be found. */
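/* An illustrative example (hypothetical, typical 32-bit layout): for
   EXP == `s.b' with `struct { int a; short b; } s', the loop below
   walks back to S and returns it with *PBITSIZE == 16,
   *PBITPOS == 32, *POFFSET == 0 and *PMODE == HImode.  */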
5364 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5365 punsignedp, pvolatilep, palignment)
5367 HOST_WIDE_INT *pbitsize;
5368 HOST_WIDE_INT *pbitpos;
5370 enum machine_mode *pmode;
5373 unsigned int *palignment;
5376 enum machine_mode mode = VOIDmode;
5377 tree offset = size_zero_node;
5378 tree bit_offset = bitsize_zero_node;
5379 unsigned int alignment = BIGGEST_ALIGNMENT;
5380 tree placeholder_ptr = 0;
5383 /* First get the mode, signedness, and size. We do this from just the
5384 outermost expression. */
5385 if (TREE_CODE (exp) == COMPONENT_REF)
5387 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5388 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5389 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5391 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5393 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5395 size_tree = TREE_OPERAND (exp, 1);
5396 *punsignedp = TREE_UNSIGNED (exp);
5400 mode = TYPE_MODE (TREE_TYPE (exp));
5401 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5403 if (mode == BLKmode)
5404 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5406 *pbitsize = GET_MODE_BITSIZE (mode);
5411 if (! host_integerp (size_tree, 1))
5412 mode = BLKmode, *pbitsize = -1;
5414 *pbitsize = tree_low_cst (size_tree, 1);
5417 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5418 and find the ultimate containing object. */
5421 if (TREE_CODE (exp) == BIT_FIELD_REF)
5422 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5423 else if (TREE_CODE (exp) == COMPONENT_REF)
5425 tree field = TREE_OPERAND (exp, 1);
5426 tree this_offset = DECL_FIELD_OFFSET (field);
5428 /* If this field hasn't been filled in yet, don't go
5429 past it. This should only happen when folding expressions
5430 made during type construction. */
5431 if (this_offset == 0)
5433 else if (! TREE_CONSTANT (this_offset)
5434 && contains_placeholder_p (this_offset))
5435 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5437 offset = size_binop (PLUS_EXPR, offset, this_offset);
5438 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5439 DECL_FIELD_BIT_OFFSET (field));
5441 if (! host_integerp (offset, 0))
5442 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5445 else if (TREE_CODE (exp) == ARRAY_REF
5446 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5448 tree index = TREE_OPERAND (exp, 1);
5449 tree array = TREE_OPERAND (exp, 0);
5450 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5451 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5452 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5454 /* We assume all arrays have sizes that are a multiple of a byte.
5455 First subtract the lower bound, if any, in the type of the
5456 index, then convert to sizetype and multiply by the size of the element. */
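/* E.g. (illustrative): for `a[i]' in a language whose array domain
   starts at 1, this computes offset = (i - 1) * the element size,
   all in sizetype.  */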
5458 if (low_bound != 0 && ! integer_zerop (low_bound))
5459 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5462 /* If the index has a self-referential type, pass it to a
5463 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5464 component to one. */
5465 if (! TREE_CONSTANT (index)
5466 && contains_placeholder_p (index))
5467 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5468 if (! TREE_CONSTANT (unit_size)
5469 && contains_placeholder_p (unit_size))
5470 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5472 offset = size_binop (PLUS_EXPR, offset,
5473 size_binop (MULT_EXPR,
5474 convert (sizetype, index),
5478 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5480 exp = find_placeholder (exp, &placeholder_ptr);
5483 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5484 && ! ((TREE_CODE (exp) == NOP_EXPR
5485 || TREE_CODE (exp) == CONVERT_EXPR)
5486 && (TYPE_MODE (TREE_TYPE (exp))
5487 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5490 /* If any reference in the chain is volatile, the effect is volatile. */
5491 if (TREE_THIS_VOLATILE (exp))
5494 /* If the offset is non-constant already, then we can't assume any
5495 alignment more than the alignment here. */
5496 if (! TREE_CONSTANT (offset))
5497 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5499 exp = TREE_OPERAND (exp, 0);
5503 alignment = MIN (alignment, DECL_ALIGN (exp));
5504 else if (TREE_TYPE (exp) != 0)
5505 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5507 /* If OFFSET is constant, see if we can return the whole thing as a
5508 constant bit position. Otherwise, split it up. */
5509 if (host_integerp (offset, 0)
5510 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5512 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5513 && host_integerp (tem, 0))
5514 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5516 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5519 *palignment = alignment;
5523 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5525 static enum memory_use_mode
5526 get_memory_usage_from_modifier (modifier)
5527 enum expand_modifier modifier;
5533 return MEMORY_USE_RO;
5535 case EXPAND_MEMORY_USE_WO:
5536 return MEMORY_USE_WO;
5538 case EXPAND_MEMORY_USE_RW:
5539 return MEMORY_USE_RW;
5541 case EXPAND_MEMORY_USE_DONT:
5542 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5543 MEMORY_USE_DONT, because they are modifiers to a call of
5544 expand_expr in the ADDR_EXPR case of expand_expr. */
5545 case EXPAND_CONST_ADDRESS:
5546 case EXPAND_INITIALIZER:
5547 return MEMORY_USE_DONT;
5548 case EXPAND_MEMORY_USE_BAD:
5554 /* Given an rtx VALUE that may contain additions and multiplications, return
5555 an equivalent value that just refers to a register, memory, or constant.
5556 This is done by generating instructions to perform the arithmetic and
5557 returning a pseudo-register containing the value.
5559 The returned value may be a REG, SUBREG, MEM or constant. */
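/* An illustrative sketch (hypothetical): called on
   (plus (reg 60) (const_int 4)), the code below picks add_optab,
   emits the addition via expand_binop, and returns a pseudo register
   holding the sum.  */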
5562 force_operand (value, target)
5566 /* Use a temporary to force order of execution of calls to `force_operand'. */
5570 /* Use subtarget as the target for operand 0 of a binary operation. */
5571 rtx subtarget = get_subtarget (target);
5573 /* Check for a PIC address load. */
5575 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5576 && XEXP (value, 0) == pic_offset_table_rtx
5577 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5578 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5579 || GET_CODE (XEXP (value, 1)) == CONST))
5582 subtarget = gen_reg_rtx (GET_MODE (value));
5583 emit_move_insn (subtarget, value);
5587 if (GET_CODE (value) == PLUS)
5588 binoptab = add_optab;
5589 else if (GET_CODE (value) == MINUS)
5590 binoptab = sub_optab;
5591 else if (GET_CODE (value) == MULT)
5593 op2 = XEXP (value, 1);
5594 if (!CONSTANT_P (op2)
5595 && !(GET_CODE (op2) == REG && op2 != subtarget))
5597 tmp = force_operand (XEXP (value, 0), subtarget);
5598 return expand_mult (GET_MODE (value), tmp,
5599 force_operand (op2, NULL_RTX),
5605 op2 = XEXP (value, 1);
5606 if (!CONSTANT_P (op2)
5607 && !(GET_CODE (op2) == REG && op2 != subtarget))
5609 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5611 binoptab = add_optab;
5612 op2 = negate_rtx (GET_MODE (value), op2);
5615 /* Check for an addition with OP2 a constant integer and our first
5616 operand a PLUS of a virtual register and something else. In that
5617 case, we want to emit the sum of the virtual register and the
5618 constant first and then add the other value. This allows virtual
5619 register instantiation to simply modify the constant rather than
5620 creating another one around this addition. */
5621 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5622 && GET_CODE (XEXP (value, 0)) == PLUS
5623 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5624 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5625 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5627 rtx temp = expand_binop (GET_MODE (value), binoptab,
5628 XEXP (XEXP (value, 0), 0), op2,
5629 subtarget, 0, OPTAB_LIB_WIDEN);
5630 return expand_binop (GET_MODE (value), binoptab, temp,
5631 force_operand (XEXP (XEXP (value, 0), 1), 0),
5632 target, 0, OPTAB_LIB_WIDEN);
5635 tmp = force_operand (XEXP (value, 0), subtarget);
5636 return expand_binop (GET_MODE (value), binoptab, tmp,
5637 force_operand (op2, NULL_RTX),
5638 target, 0, OPTAB_LIB_WIDEN);
5639 /* We give UNSIGNEDP = 0 to expand_binop
5640 because the only operations we are expanding here are signed ones. */
5645 /* Subroutine of expand_expr: return nonzero iff there is no way that
5646 EXP can reference X, which is being modified. TOP_P is nonzero if this
5647 call is going to be used to determine whether we need a temporary
5648 for EXP, as opposed to a recursive call to this function.
5650 It is always safe for this routine to return zero since it merely
5651 searches for optimization opportunities. */
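/* An illustrative case (hypothetical): when expanding `x = y + x'
   with X the MEM for the left-hand side, the right-hand expression
   references X, so this returns 0 and the caller must evaluate into
   a temporary instead of directly into X.  */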
5654 safe_from_p (x, exp, top_p)
5661 static tree save_expr_list;
5664 /* If EXP has varying size, we MUST use a target since we currently
5665 have no way of allocating temporaries of variable size
5666 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5667 So we assume here that something at a higher level has prevented a
5668 clash. This is somewhat bogus, but the best we can do. Only
5669 do this when X is BLKmode and when we are at the top level. */
5670 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5671 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5672 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5673 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5674 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5676 && GET_MODE (x) == BLKmode)
5677 /* If X is in the outgoing argument area, it is always safe. */
5678 || (GET_CODE (x) == MEM
5679 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5680 || (GET_CODE (XEXP (x, 0)) == PLUS
5681 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5684 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5685 find the underlying pseudo. */
5686 if (GET_CODE (x) == SUBREG)
5689 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5693 /* A SAVE_EXPR might appear many times in the expression passed to the
5694 top-level safe_from_p call, and if it has a complex subexpression,
5695 examining it multiple times could result in a combinatorial explosion.
5696 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5697 with optimization took about 28 minutes to compile -- even though it was
5698 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5699 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5700 we have processed. Note that the only test of top_p was above. */
5709 rtn = safe_from_p (x, exp, 0);
5711 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5712 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5717 /* Now look at our tree code and possibly recurse. */
5718 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5721 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5728 if (TREE_CODE (exp) == TREE_LIST)
5729 return ((TREE_VALUE (exp) == 0
5730 || safe_from_p (x, TREE_VALUE (exp), 0))
5731 && (TREE_CHAIN (exp) == 0
5732 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5733 else if (TREE_CODE (exp) == ERROR_MARK)
5734 return 1; /* An already-visited SAVE_EXPR? */
5739 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5743 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5744 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5748 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5749 the expression. If it is set, we conflict iff we are that rtx or
5750 both are in memory. Otherwise, we check all operands of the
5751 expression recursively. */
5753 switch (TREE_CODE (exp))
5756 return (staticp (TREE_OPERAND (exp, 0))
5757 || TREE_STATIC (exp)
5758 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5761 if (GET_CODE (x) == MEM
5762 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5763 get_alias_set (exp)))
5768 /* Assume that the call will clobber all hard registers and all of memory. */
5770 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5771 || GET_CODE (x) == MEM)
5776 /* If a sequence exists, we would have to scan every instruction
5777 in the sequence to see if it was safe. This is probably not
5779 if (RTL_EXPR_SEQUENCE (exp))
5782 exp_rtl = RTL_EXPR_RTL (exp);
5785 case WITH_CLEANUP_EXPR:
5786 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5789 case CLEANUP_POINT_EXPR:
5790 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5793 exp_rtl = SAVE_EXPR_RTL (exp);
5797 /* If we've already scanned this, don't do it again. Otherwise,
5798 show we've scanned it and record for clearing the flag if we're going on. */
5800 if (TREE_PRIVATE (exp))
5803 TREE_PRIVATE (exp) = 1;
5804 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5806 TREE_PRIVATE (exp) = 0;
5810 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5814 /* The only operand we look at is operand 1. The rest aren't
5815 part of the expression. */
5816 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5818 case METHOD_CALL_EXPR:
5819 /* This takes an rtx argument, but shouldn't appear here. */
5826 /* If we have an rtx, we do not need to scan our operands. */
5830 nops = first_rtl_op (TREE_CODE (exp));
5831 for (i = 0; i < nops; i++)
5832 if (TREE_OPERAND (exp, i) != 0
5833 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5836 /* If this is a language-specific tree code, it may require
5837 special handling. */
5838 if ((unsigned int) TREE_CODE (exp)
5839 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5841 && !(*lang_safe_from_p) (x, exp))
5845 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5849 if (GET_CODE (exp_rtl) == SUBREG)
5851 exp_rtl = SUBREG_REG (exp_rtl);
5852 if (GET_CODE (exp_rtl) == REG
5853 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5857 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5858 are memory and they conflict. */
5859 return ! (rtx_equal_p (x, exp_rtl)
5860 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5861 && true_dependence (exp_rtl, GET_MODE (x), x,
5862 rtx_addr_varies_p)));
5865 /* If we reach here, it is safe. */
5869 /* Subroutine of expand_expr: return rtx if EXP is a
5870 variable or parameter; else return 0. */
5877 switch (TREE_CODE (exp))
5881 return DECL_RTL (exp);
5887 #ifdef MAX_INTEGER_COMPUTATION_MODE
5890 check_max_integer_computation_mode (exp)
5893 enum tree_code code;
5894 enum machine_mode mode;
5896 /* Strip any NOPs that don't change the mode. */
5898 code = TREE_CODE (exp);
5900 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5901 if (code == NOP_EXPR
5902 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5905 /* First check the type of the overall operation. We need only look at
5906 unary, binary and relational operations. */
5907 if (TREE_CODE_CLASS (code) == '1'
5908 || TREE_CODE_CLASS (code) == '2'
5909 || TREE_CODE_CLASS (code) == '<')
5911 mode = TYPE_MODE (TREE_TYPE (exp));
5912 if (GET_MODE_CLASS (mode) == MODE_INT
5913 && mode > MAX_INTEGER_COMPUTATION_MODE)
5914 internal_error ("unsupported wide integer operation");
5917 /* Check operand of a unary op. */
5918 if (TREE_CODE_CLASS (code) == '1')
5920 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5921 if (GET_MODE_CLASS (mode) == MODE_INT
5922 && mode > MAX_INTEGER_COMPUTATION_MODE)
5923 internal_error ("unsupported wide integer operation");
5926 /* Check operands of a binary/comparison op. */
5927 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5929 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5930 if (GET_MODE_CLASS (mode) == MODE_INT
5931 && mode > MAX_INTEGER_COMPUTATION_MODE)
5932 internal_error ("unsupported wide integer operation");
5934 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5935 if (GET_MODE_CLASS (mode) == MODE_INT
5936 && mode > MAX_INTEGER_COMPUTATION_MODE)
5937 internal_error ("unsupported wide integer operation");
5942 /* Return the highest power of two that EXP is known to be a multiple of.
5943 This is used in updating alignment of MEMs in array references. */
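/* Illustrative examples (hypothetical): this returns 4 for `i * 12'
   (the variable factor contributes 1, the constant 12 contributes 4)
   and MIN (8, 4) == 4 for `i * 8 + 4'.  */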
5945 static HOST_WIDE_INT
5946 highest_pow2_factor (exp)
5949 HOST_WIDE_INT c0, c1;
5951 switch (TREE_CODE (exp))
5954 /* If the integer is expressible in a HOST_WIDE_INT, we can find
5955 the lowest bit that's a one. If the result is zero or negative,
5956 pessimize by returning 1. This is overly-conservative, but such
5957 things should not happen in the offset expressions that we are called with. */
5959 if (host_integerp (exp, 0))
5961 c0 = tree_low_cst (exp, 0);
5962 return c0 >= 0 ? c0 & -c0 : 1;
5966 case PLUS_EXPR: case MINUS_EXPR:
5967 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5968 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5969 return MIN (c0, c1);
5972 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5973 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5976 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5978 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5979 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5980 return MAX (1, c0 / c1);
5982 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5983 case COMPOUND_EXPR: case SAVE_EXPR:
5984 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5987 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5988 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5989 return MIN (c0, c1);
5998 /* Return an object on the placeholder list that matches EXP, a
5999 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6000 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6001 tree.def. If no such object is found, abort. If PLIST is nonzero, it is
6002 a location which initially points to a starting location in the
6003 placeholder list (zero means start of the list) and where a pointer into
6004 the placeholder list at which the object is found is placed. */
6007 find_placeholder (exp, plist)
6011 tree type = TREE_TYPE (exp);
6012 tree placeholder_expr;
6014 for (placeholder_expr
6015 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6016 placeholder_expr != 0;
6017 placeholder_expr = TREE_CHAIN (placeholder_expr))
6019 tree need_type = TYPE_MAIN_VARIANT (type);
6022 /* Find the outermost reference that is of the type we want. If none,
6023 see if any object has a type that is a pointer to the type we want. */
6025 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6026 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6027 || TREE_CODE (elt) == COND_EXPR)
6028 ? TREE_OPERAND (elt, 1)
6029 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6030 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6031 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6032 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6033 ? TREE_OPERAND (elt, 0) : 0))
6034 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6037 *plist = placeholder_expr;
6041 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6043 = ((TREE_CODE (elt) == COMPOUND_EXPR
6044 || TREE_CODE (elt) == COND_EXPR)
6045 ? TREE_OPERAND (elt, 1)
6046 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6047 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6048 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6049 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6050 ? TREE_OPERAND (elt, 0) : 0))
6051 if (POINTER_TYPE_P (TREE_TYPE (elt))
6052 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6056 *plist = placeholder_expr;
6057 return build1 (INDIRECT_REF, need_type, elt);
6064 /* expand_expr: generate code for computing expression EXP.
6065 An rtx for the computed value is returned. The value is never null.
6066 In the case of a void EXP, const0_rtx is returned.
6068 The value may be stored in TARGET if TARGET is nonzero.
6069 TARGET is just a suggestion; callers must assume that
6070 the rtx returned may not be the same as TARGET.
6072 If TARGET is CONST0_RTX, it means that the value will be ignored.
6074 If TMODE is not VOIDmode, it suggests generating the
6075 result in mode TMODE. But this is done only when convenient.
6076 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6077 TMODE is just a suggestion; callers must assume that
6078 the rtx returned may not have mode TMODE.
6080 Note that TARGET may have neither TMODE nor MODE. In that case, it
6081 probably will not be used.
6083 If MODIFIER is EXPAND_SUM then when EXP is an addition
6084 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6085 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6086 products as above, or REG or MEM, or constant.
6087 Ordinarily in such cases we would output mul or add instructions
6088 and then return a pseudo reg containing the sum.
6090 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6091 it also marks a label as absolutely required (it can't be dead).
6092 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6093 This is used for outputting expressions used in initializers.
6095 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6096 with a constant address even if that address is not normally legitimate.
6097 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
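/* An illustrative sketch (hypothetical): with MODIFIER == EXPAND_SUM,
   expanding the address arithmetic `a + b * 4' may simply return
   (plus (reg A) (mult (reg B) (const_int 4))) for the caller to fold
   into an address, instead of emitting mul and add insns and
   returning a pseudo.  */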
6100 expand_expr (exp, target, tmode, modifier)
6103 enum machine_mode tmode;
6104 enum expand_modifier modifier;
6107 tree type = TREE_TYPE (exp);
6108 int unsignedp = TREE_UNSIGNED (type);
6109 enum machine_mode mode;
6110 enum tree_code code = TREE_CODE (exp);
6112 rtx subtarget, original_target;
6115 /* Used by check-memory-usage to make modifier read only. */
6116 enum expand_modifier ro_modifier;
6118 /* Handle ERROR_MARK before anybody tries to access its type. */
6119 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6121 op0 = CONST0_RTX (tmode);
6127 mode = TYPE_MODE (type);
6128 /* Use subtarget as the target for operand 0 of a binary operation. */
6129 subtarget = get_subtarget (target);
6130 original_target = target;
6131 ignore = (target == const0_rtx
6132 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6133 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6134 || code == COND_EXPR)
6135 && TREE_CODE (type) == VOID_TYPE));
6137 /* Make a read-only version of the modifier. */
6138 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6139 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6140 ro_modifier = modifier;
6142 ro_modifier = EXPAND_NORMAL;
6144 /* If we are going to ignore this result, we need only do something
6145 if there is a side-effect somewhere in the expression. If there
6146 is, short-circuit the most common cases here. Note that we must
6147 not call expand_expr with anything but const0_rtx in case this
6148 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6152 if (! TREE_SIDE_EFFECTS (exp))
6155 /* Ensure we reference a volatile object even if value is ignored, but
6156 don't do this if all we are doing is taking its address. */
6157 if (TREE_THIS_VOLATILE (exp)
6158 && TREE_CODE (exp) != FUNCTION_DECL
6159 && mode != VOIDmode && mode != BLKmode
6160 && modifier != EXPAND_CONST_ADDRESS)
6162 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6163 if (GET_CODE (temp) == MEM)
6164 temp = copy_to_reg (temp);
6168 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6169 || code == INDIRECT_REF || code == BUFFER_REF)
6170 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6171 VOIDmode, ro_modifier);
6172 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6173 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6175 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6177 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6181 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6182 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6183 /* If the second operand has no side effects, just evaluate the first. */
6185 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6186 VOIDmode, ro_modifier);
6187 else if (code == BIT_FIELD_REF)
6189 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6191 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6193 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6201 #ifdef MAX_INTEGER_COMPUTATION_MODE
6202 /* Only check stuff here if the mode we want is different from the mode
6203 of the expression; if it's the same, check_max_integer_computation_mode
6204 will handle it. Do we really need to check this stuff at all? */
6207 && GET_MODE (target) != mode
6208 && TREE_CODE (exp) != INTEGER_CST
6209 && TREE_CODE (exp) != PARM_DECL
6210 && TREE_CODE (exp) != ARRAY_REF
6211 && TREE_CODE (exp) != ARRAY_RANGE_REF
6212 && TREE_CODE (exp) != COMPONENT_REF
6213 && TREE_CODE (exp) != BIT_FIELD_REF
6214 && TREE_CODE (exp) != INDIRECT_REF
6215 && TREE_CODE (exp) != CALL_EXPR
6216 && TREE_CODE (exp) != VAR_DECL
6217 && TREE_CODE (exp) != RTL_EXPR)
6219 enum machine_mode mode = GET_MODE (target);
6221 if (GET_MODE_CLASS (mode) == MODE_INT
6222 && mode > MAX_INTEGER_COMPUTATION_MODE)
6223 internal_error ("unsupported wide integer operation");
6227 && TREE_CODE (exp) != INTEGER_CST
6228 && TREE_CODE (exp) != PARM_DECL
6229 && TREE_CODE (exp) != ARRAY_REF
6230 && TREE_CODE (exp) != ARRAY_RANGE_REF
6231 && TREE_CODE (exp) != COMPONENT_REF
6232 && TREE_CODE (exp) != BIT_FIELD_REF
6233 && TREE_CODE (exp) != INDIRECT_REF
6234 && TREE_CODE (exp) != VAR_DECL
6235 && TREE_CODE (exp) != CALL_EXPR
6236 && TREE_CODE (exp) != RTL_EXPR
6237 && GET_MODE_CLASS (tmode) == MODE_INT
6238 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6239 internal_error ("unsupported wide integer operation");
6241 check_max_integer_computation_mode (exp);
6244 /* If we will do cse, generate all results into pseudo registers
6245 since 1) that allows cse to find more things
6246 and 2) otherwise cse could produce an insn the machine cannot support. */
6249 if (! cse_not_expected && mode != BLKmode && target
6250 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6257 tree function = decl_function_context (exp);
6258 /* Handle using a label in a containing function. */
6259 if (function != current_function_decl
6260 && function != inline_function_decl && function != 0)
6262 struct function *p = find_function_data (function);
6263 p->expr->x_forced_labels
6264 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6265 p->expr->x_forced_labels);
6269 if (modifier == EXPAND_INITIALIZER)
6270 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6271 label_rtx (exp),
6272 forced_labels);
6275 temp = gen_rtx_MEM (FUNCTION_MODE,
6276 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6277 if (function != current_function_decl
6278 && function != inline_function_decl && function != 0)
6279 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6284 if (DECL_RTL (exp) == 0)
6286 error_with_decl (exp, "prior parameter's size depends on `%s'");
6287 return CONST0_RTX (mode);
6290 /* ... fall through ... */
6293 /* If a static var's type was incomplete when the decl was written,
6294 but the type is complete now, lay out the decl now. */
6295 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6296 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6298 layout_decl (exp, 0);
6299 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6302 /* Although static-storage variables start off initialized, according to
6303 ANSI C, a memcpy could overwrite them with uninitialized values. So
6304 we check them too. This also lets us check for read-only variables
6305 accessed via a non-const declaration, in case it won't be detected
6306 any other way (e.g., in an embedded system or OS kernel without
6307 memory protection).
6309 Aggregates are not checked here; they're handled elsewhere. */
6310 if (cfun && current_function_check_memory_usage
6312 && GET_CODE (DECL_RTL (exp)) == MEM
6313 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6315 enum memory_use_mode memory_usage;
6316 memory_usage = get_memory_usage_from_modifier (modifier);
6318 in_check_memory_usage = 1;
6319 if (memory_usage != MEMORY_USE_DONT)
6320 emit_library_call (chkr_check_addr_libfunc,
6321 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6322 XEXP (DECL_RTL (exp), 0), Pmode,
6323 GEN_INT (int_size_in_bytes (type)),
6324 TYPE_MODE (sizetype),
6325 GEN_INT (memory_usage),
6326 TYPE_MODE (integer_type_node));
6327 in_check_memory_usage = 0;
6330 /* ... fall through ... */
6334 if (DECL_RTL (exp) == 0)
6337 /* Ensure the variable is marked as used even if it doesn't go through
6338 a parser. If it hasn't been used yet, write out an external
6339 definition. */
6340 if (! TREE_USED (exp))
6342 assemble_external (exp);
6343 TREE_USED (exp) = 1;
6346 /* Show we haven't gotten RTL for this yet. */
6349 /* Handle variables inherited from containing functions. */
6350 context = decl_function_context (exp);
6352 /* We treat inline_function_decl as an alias for the current function
6353 because that is the inline function whose vars, types, etc.
6354 are being merged into the current function.
6355 See expand_inline_function. */
6357 if (context != 0 && context != current_function_decl
6358 && context != inline_function_decl
6359 /* If var is static, we don't need a static chain to access it. */
6360 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6361 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6365 /* Mark as non-local and addressable. */
6366 DECL_NONLOCAL (exp) = 1;
6367 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6369 mark_addressable (exp);
6370 if (GET_CODE (DECL_RTL (exp)) != MEM)
6372 addr = XEXP (DECL_RTL (exp), 0);
6373 if (GET_CODE (addr) == MEM)
6375 = replace_equiv_address (addr,
6376 fix_lexical_addr (XEXP (addr, 0), exp));
6378 addr = fix_lexical_addr (addr, exp);
6380 temp = replace_equiv_address (DECL_RTL (exp), addr);
6383 /* This is the case of an array whose size is to be determined
6384 from its initializer, while the initializer is still being parsed.
6387 else if (GET_CODE (DECL_RTL (exp)) == MEM
6388 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6389 temp = validize_mem (DECL_RTL (exp));
6391 /* If DECL_RTL is memory, we are in the normal case and either
6392 the address is not valid or it is not a register and -fforce-addr
6393 is specified, get the address into a register. */
6395 else if (GET_CODE (DECL_RTL (exp)) == MEM
6396 && modifier != EXPAND_CONST_ADDRESS
6397 && modifier != EXPAND_SUM
6398 && modifier != EXPAND_INITIALIZER
6399 && (! memory_address_p (DECL_MODE (exp),
6400 XEXP (DECL_RTL (exp), 0))
6402 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6403 temp = replace_equiv_address (DECL_RTL (exp),
6404 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6406 /* If we got something, return it. But first, set the alignment
6407 if the address is a register. */
6410 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6411 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6416 /* If the mode of DECL_RTL does not match that of the decl, it
6417 must be a promoted value. We return a SUBREG of the wanted mode,
6418 but mark it so that we know that it was already extended. */
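/* Illustrative example: a char variable promoted to SImode has DECL_RTL
   (reg:SI n); when QImode is wanted we hand back (subreg:QI (reg:SI n))
   with SUBREG_PROMOTED_VAR_P set, so later code can skip a redundant
   sign- or zero-extension.  */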
6420 if (GET_CODE (DECL_RTL (exp)) == REG
6421 && GET_MODE (DECL_RTL (exp)) != mode)
6423 /* Get the signedness used for this variable. Ensure we get the
6424 same mode we got when the variable was declared. */
6425 if (GET_MODE (DECL_RTL (exp))
6426 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6429 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6430 SUBREG_PROMOTED_VAR_P (temp) = 1;
6431 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6435 return DECL_RTL (exp);
6438 return immed_double_const (TREE_INT_CST_LOW (exp),
6439 TREE_INT_CST_HIGH (exp), mode);
6442 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6443 EXPAND_MEMORY_USE_BAD);
6446 /* If optimized, generate immediate CONST_DOUBLE
6447 which will be turned into memory by reload if necessary.
6449 We used to force a register so that loop.c could see it. But
6450 this does not allow gen_* patterns to perform optimizations with
6451 the constants. It also produces two insns in cases like "x = 1.0;".
6452 On most machines, floating-point constants are not permitted in
6453 many insns, so we'd end up copying it to a register in any case.
6455 Now, we do the copying in expand_binop, if appropriate. */
6456 return immed_real_const (exp);
6460 if (! TREE_CST_RTL (exp))
6461 output_constant_def (exp, 1);
6463 /* TREE_CST_RTL probably contains a constant address.
6464 On RISC machines where a constant address isn't valid,
6465 make some insns to get that address into a register. */
6466 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6467 && modifier != EXPAND_CONST_ADDRESS
6468 && modifier != EXPAND_INITIALIZER
6469 && modifier != EXPAND_SUM
6470 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6472 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6473 return replace_equiv_address (TREE_CST_RTL (exp),
6474 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6475 return TREE_CST_RTL (exp);
6477 case EXPR_WITH_FILE_LOCATION:
6480 const char *saved_input_filename = input_filename;
6481 int saved_lineno = lineno;
6482 input_filename = EXPR_WFL_FILENAME (exp);
6483 lineno = EXPR_WFL_LINENO (exp);
6484 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6485 emit_line_note (input_filename, lineno);
6486 /* Possibly avoid switching back and forth here. */
6487 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6488 input_filename = saved_input_filename;
6489 lineno = saved_lineno;
6494 context = decl_function_context (exp);
6496 /* If this SAVE_EXPR was at global context, assume we are an
6497 initialization function and move it into our context. */
6499 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6501 /* We treat inline_function_decl as an alias for the current function
6502 because that is the inline function whose vars, types, etc.
6503 are being merged into the current function.
6504 See expand_inline_function. */
6505 if (context == current_function_decl || context == inline_function_decl)
6508 /* If this is non-local, handle it. */
6511 /* The following call just exists to abort if the context is
6512 not of a containing function. */
6513 find_function_data (context);
6515 temp = SAVE_EXPR_RTL (exp);
6516 if (temp && GET_CODE (temp) == REG)
6518 put_var_into_stack (exp);
6519 temp = SAVE_EXPR_RTL (exp);
6521 if (temp == 0 || GET_CODE (temp) != MEM)
6524 replace_equiv_address (temp,
6525 fix_lexical_addr (XEXP (temp, 0), exp));
6527 if (SAVE_EXPR_RTL (exp) == 0)
6529 if (mode == VOIDmode)
6532 temp = assign_temp (build_qualified_type (type,
6534 | TYPE_QUAL_CONST)),
6537 SAVE_EXPR_RTL (exp) = temp;
6538 if (!optimize && GET_CODE (temp) == REG)
6539 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6542 /* If the mode of TEMP does not match that of the expression, it
6543 must be a promoted value. We pass store_expr a SUBREG of the
6544 wanted mode but mark it so that we know that it was already
6545 extended. Note that `unsignedp' was modified above in
6546 this case. */
6548 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6550 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6551 SUBREG_PROMOTED_VAR_P (temp) = 1;
6552 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6555 if (temp == const0_rtx)
6556 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6557 EXPAND_MEMORY_USE_BAD);
6559 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6561 TREE_USED (exp) = 1;
6564 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6565 must be a promoted value. We return a SUBREG of the wanted mode,
6566 but mark it so that we know that it was already extended. */
6568 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6569 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6571 /* Compute the signedness and make the proper SUBREG. */
6572 promote_mode (type, mode, &unsignedp, 0);
6573 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6574 SUBREG_PROMOTED_VAR_P (temp) = 1;
6575 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6579 return SAVE_EXPR_RTL (exp);
6584 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6585 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6589 case PLACEHOLDER_EXPR:
6591 tree old_list = placeholder_list;
6592 tree placeholder_expr = 0;
6594 exp = find_placeholder (exp, &placeholder_expr);
6595 placeholder_list = TREE_CHAIN (placeholder_expr);
6596 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6597 placeholder_list = old_list;
6601 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6604 case WITH_RECORD_EXPR:
6605 /* Put the object on the placeholder list, expand our first operand,
6606 and pop the list. */
6607 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6609 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6610 tmode, ro_modifier);
6611 placeholder_list = TREE_CHAIN (placeholder_list);
6615 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6616 expand_goto (TREE_OPERAND (exp, 0));
6618 expand_computed_goto (TREE_OPERAND (exp, 0));
6622 expand_exit_loop_if_false (NULL,
6623 invert_truthvalue (TREE_OPERAND (exp, 0)));
6626 case LABELED_BLOCK_EXPR:
6627 if (LABELED_BLOCK_BODY (exp))
6628 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6629 /* Should perhaps use expand_label, but this is simpler and safer. */
6630 do_pending_stack_adjust ();
6631 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6634 case EXIT_BLOCK_EXPR:
6635 if (EXIT_BLOCK_RETURN (exp))
6636 sorry ("returned value in block_exit_expr");
6637 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6642 expand_start_loop (1);
6643 expand_expr_stmt (TREE_OPERAND (exp, 0));
6651 tree vars = TREE_OPERAND (exp, 0);
6652 int vars_need_expansion = 0;
6654 /* Need to open a binding contour here because
6655 if there are any cleanups they must be contained here. */
6656 expand_start_bindings (2);
6658 /* Mark the corresponding BLOCK for output in its proper place. */
6659 if (TREE_OPERAND (exp, 2) != 0
6660 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6661 insert_block (TREE_OPERAND (exp, 2));
6663 /* If VARS have not yet been expanded, expand them now. */
6664 while (vars != 0)
6666 if (!DECL_RTL_SET_P (vars))
6668 vars_need_expansion = 1;
6669 expand_decl (vars);
6671 expand_decl_init (vars);
6672 vars = TREE_CHAIN (vars);
6675 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6677 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6683 if (RTL_EXPR_SEQUENCE (exp))
6685 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6687 emit_insns (RTL_EXPR_SEQUENCE (exp));
6688 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6690 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6691 free_temps_for_rtl_expr (exp);
6692 return RTL_EXPR_RTL (exp);
6695 /* If we don't need the result, just ensure we evaluate any
6696 subexpressions. */
6697 if (ignore)
6699 tree elt;
6700 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6701 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6702 EXPAND_MEMORY_USE_BAD);
6706 /* All elts simple constants => refer to a constant in memory. But
6707 if this is a non-BLKmode mode, let it store a field at a time
6708 since that should make a CONST_INT or CONST_DOUBLE when we
6709 fold. Likewise, if we have a target we can use, it is best to
6710 store directly into the target unless the type is large enough
6711 that memcpy will be used. If we are making an initializer and
6712 all operands are constant, put it in memory as well. */
6713 else if ((TREE_STATIC (exp)
6714 && ((mode == BLKmode
6715 && ! (target != 0 && safe_from_p (target, exp, 1)))
6716 || TREE_ADDRESSABLE (exp)
6717 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6718 && (! MOVE_BY_PIECES_P
6719 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6721 && ! mostly_zeros_p (exp))))
6722 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6724 rtx constructor = output_constant_def (exp, 1);
6726 if (modifier != EXPAND_CONST_ADDRESS
6727 && modifier != EXPAND_INITIALIZER
6728 && modifier != EXPAND_SUM)
6729 constructor = validize_mem (constructor);
6735 /* Handle calls that pass values in multiple non-contiguous
6736 locations. The Irix 6 ABI has examples of this. */
6737 if (target == 0 || ! safe_from_p (target, exp, 1)
6738 || GET_CODE (target) == PARALLEL)
6740 = assign_temp (build_qualified_type (type,
6742 | (TREE_READONLY (exp)
6743 * TYPE_QUAL_CONST))),
6744 TREE_ADDRESSABLE (exp), 1, 1);
6746 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6747 int_size_in_bytes (TREE_TYPE (exp)));
6753 tree exp1 = TREE_OPERAND (exp, 0);
6755 tree string = string_constant (exp1, &index);
6757 /* Try to optimize reads from const strings. */
6759 && TREE_CODE (string) == STRING_CST
6760 && TREE_CODE (index) == INTEGER_CST
6761 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6762 && GET_MODE_CLASS (mode) == MODE_INT
6763 && GET_MODE_SIZE (mode) == 1
6764 && modifier != EXPAND_MEMORY_USE_WO)
6765 return
6766 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6768 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6769 op0 = memory_address (mode, op0);
6771 if (cfun && current_function_check_memory_usage
6772 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6774 enum memory_use_mode memory_usage;
6775 memory_usage = get_memory_usage_from_modifier (modifier);
6777 if (memory_usage != MEMORY_USE_DONT)
6779 in_check_memory_usage = 1;
6780 emit_library_call (chkr_check_addr_libfunc,
6781 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6782 Pmode, GEN_INT (int_size_in_bytes (type)),
6783 TYPE_MODE (sizetype),
6784 GEN_INT (memory_usage),
6785 TYPE_MODE (integer_type_node));
6786 in_check_memory_usage = 0;
6790 temp = gen_rtx_MEM (mode, op0);
6791 set_mem_attributes (temp, exp, 0);
6793 /* If we are writing to this object and its type is a record with
6794 readonly fields, we must mark it as readonly so it will
6795 conflict with readonly references to those fields. */
6796 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6797 RTX_UNCHANGING_P (temp) = 1;
6803 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6807 tree array = TREE_OPERAND (exp, 0);
6808 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6809 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6810 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6813 /* Optimize the special-case of a zero lower bound.
6815 We convert the low_bound to sizetype to avoid some problems
6816 with constant folding. (E.g. suppose the lower bound is 1,
6817 and its mode is QI. Without the conversion, (ARRAY
6818 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6819 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6821 if (! integer_zerop (low_bound))
6822 index = size_diffop (index, convert (sizetype, low_bound));
6824 /* Fold an expression like: "foo"[2].
6825 This is not done in fold so it won't happen inside &.
6826 Don't fold if this is for wide characters since it's too
6827 difficult to do correctly and this is a very rare case. */
6829 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6830 && TREE_CODE (array) == STRING_CST
6831 && TREE_CODE (index) == INTEGER_CST
6832 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6833 && GET_MODE_CLASS (mode) == MODE_INT
6834 && GET_MODE_SIZE (mode) == 1)
6835 return
6836 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6838 /* If this is a constant index into a constant array,
6839 just get the value from the array. Handle both the cases when
6840 we have an explicit constructor and when our operand is a variable
6841 that was declared const. */
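/* For example (illustrative): with optimization enabled, a reference
   like tbl[1] into "static const int tbl[] = {2, 3, 5};" is folded
   here to the constant 3, never touching memory.  */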
6843 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6844 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6845 && TREE_CODE (index) == INTEGER_CST
6846 && 0 > compare_tree_int (index,
6847 list_length (CONSTRUCTOR_ELTS
6848 (TREE_OPERAND (exp, 0)))))
6852 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6853 i = TREE_INT_CST_LOW (index);
6854 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6858 return expand_expr (fold (TREE_VALUE (elem)), target,
6859 tmode, ro_modifier);
6862 else if (optimize >= 1
6863 && modifier != EXPAND_CONST_ADDRESS
6864 && modifier != EXPAND_INITIALIZER
6865 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6866 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6867 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6869 if (TREE_CODE (index) == INTEGER_CST)
6871 tree init = DECL_INITIAL (array);
6873 if (TREE_CODE (init) == CONSTRUCTOR)
6877 for (elem = CONSTRUCTOR_ELTS (init);
6879 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6880 elem = TREE_CHAIN (elem))
6883 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6884 return expand_expr (fold (TREE_VALUE (elem)), target,
6885 tmode, ro_modifier);
6887 else if (TREE_CODE (init) == STRING_CST
6888 && 0 > compare_tree_int (index,
6889 TREE_STRING_LENGTH (init)))
6891 tree type = TREE_TYPE (TREE_TYPE (init));
6892 enum machine_mode mode = TYPE_MODE (type);
6894 if (GET_MODE_CLASS (mode) == MODE_INT
6895 && GET_MODE_SIZE (mode) == 1)
6896 return (GEN_INT
6897 (TREE_STRING_POINTER
6898 (init)[TREE_INT_CST_LOW (index)]));
6907 case ARRAY_RANGE_REF:
6908 /* If the operand is a CONSTRUCTOR, we can just extract the
6909 appropriate field if it is present. Don't do this if we have
6910 already written the data since we want to refer to that copy
6911 and varasm.c assumes that's what we'll do. */
6912 if (code == COMPONENT_REF
6913 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6914 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6918 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6919 elt = TREE_CHAIN (elt))
6920 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6921 /* We can normally use the value of the field in the
6922 CONSTRUCTOR. However, if this is a bitfield in
6923 an integral mode that we can fit in a HOST_WIDE_INT,
6924 we must mask only the number of bits in the bitfield,
6925 since this is done implicitly by the constructor. If
6926 the bitfield does not meet either of those conditions,
6927 we can't do this optimization. */
6928 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6929 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6931 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6932 <= HOST_BITS_PER_WIDE_INT))))
6934 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6935 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6937 HOST_WIDE_INT bitsize
6938 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6940 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6942 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6943 op0 = expand_and (op0, op1, target);
6947 enum machine_mode imode
6948 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6950 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6953 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6955 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6965 enum machine_mode mode1;
6966 HOST_WIDE_INT bitsize, bitpos;
6969 unsigned int alignment;
6970 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6971 &mode1, &unsignedp, &volatilep,
6975 /* If we got back the original object, something is wrong. Perhaps
6976 we are evaluating an expression too early. In any event, don't
6977 infinitely recurse. */
6981 /* If TEM's type is a union of variable size, pass TARGET to the inner
6982 computation, since it will need a temporary and TARGET is known
6983 to suffice. This occurs in unchecked conversion in Ada. */
6987 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6988 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6990 ? target : NULL_RTX),
6992 (modifier == EXPAND_INITIALIZER
6993 || modifier == EXPAND_CONST_ADDRESS)
6994 ? modifier : EXPAND_NORMAL);
6996 /* If this is a constant, put it into a register if it is a
6997 legitimate constant and OFFSET is 0; put it into memory otherwise. */
6998 if (CONSTANT_P (op0))
7000 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7001 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7003 op0 = force_reg (mode, op0);
7005 op0 = validize_mem (force_const_mem (mode, op0));
7010 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7012 /* If this object is in a register, put it into memory.
7013 This case can't occur in C, but can in Ada if we have
7014 unchecked conversion of an expression from a scalar type to
7015 an array or record type. */
7016 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7017 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7019 /* If the operand is a SAVE_EXPR, we can deal with this by
7020 forcing the SAVE_EXPR into memory. */
7021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7023 put_var_into_stack (TREE_OPERAND (exp, 0));
7024 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7029 = build_qualified_type (TREE_TYPE (tem),
7030 (TYPE_QUALS (TREE_TYPE (tem))
7031 | TYPE_QUAL_CONST));
7032 rtx memloc = assign_temp (nt, 1, 1, 1);
7034 mark_temp_addr_taken (memloc);
7035 emit_move_insn (memloc, op0);
7040 if (GET_CODE (op0) != MEM)
7043 if (GET_MODE (offset_rtx) != ptr_mode)
7044 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7046 #ifdef POINTERS_EXTEND_UNSIGNED
7047 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7050 /* A constant address in OP0 can have VOIDmode; we must not try
7051 to call force_reg in that case. */
7052 if (GET_CODE (op0) == MEM
7053 && GET_MODE (op0) == BLKmode
7054 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7056 && (bitpos % bitsize) == 0
7057 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7058 && alignment == GET_MODE_ALIGNMENT (mode1))
7060 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7062 if (GET_CODE (XEXP (temp, 0)) == REG)
7065 op0 = (replace_equiv_address
7067 force_reg (GET_MODE (XEXP (temp, 0)),
7072 op0 = offset_address (op0, offset_rtx,
7073 highest_pow2_factor (offset));
7076 /* Don't forget about volatility even if this is a bitfield. */
7077 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7079 if (op0 == orig_op0)
7080 op0 = copy_rtx (op0);
7082 MEM_VOLATILE_P (op0) = 1;
7085 /* Check the access. */
7086 if (cfun != 0 && current_function_check_memory_usage
7087 && GET_CODE (op0) == MEM)
7089 enum memory_use_mode memory_usage;
7090 memory_usage = get_memory_usage_from_modifier (modifier);
7092 if (memory_usage != MEMORY_USE_DONT)
7097 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7098 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7100 /* Check the access rights of the pointer. */
7101 in_check_memory_usage = 1;
7102 if (size > BITS_PER_UNIT)
7103 emit_library_call (chkr_check_addr_libfunc,
7104 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7105 Pmode, GEN_INT (size / BITS_PER_UNIT),
7106 TYPE_MODE (sizetype),
7107 GEN_INT (memory_usage),
7108 TYPE_MODE (integer_type_node));
7109 in_check_memory_usage = 0;
7113 /* In cases where an aligned union has an unaligned object
7114 as a field, we might be extracting a BLKmode value from
7115 an integer-mode (e.g., SImode) object. Handle this case
7116 by doing the extract into an object as wide as the field
7117 (which we know to be the width of a basic mode), then
7118 storing into memory, and changing the mode to BLKmode. */
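/* E.g. (illustrative): a BLKmode field living inside an SImode union
   member is pulled out as an SImode bit field, spilled to a stack
   temporary, and the temporary is handed back with its mode changed
   to BLKmode.  */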
7119 if (mode1 == VOIDmode
7120 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7121 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7122 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7123 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7124 && modifier != EXPAND_CONST_ADDRESS
7125 && modifier != EXPAND_INITIALIZER)
7126 /* If the field isn't aligned enough to fetch as a memref,
7127 fetch it as a bit field. */
7128 || (mode1 != BLKmode
7129 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7130 && ((TYPE_ALIGN (TREE_TYPE (tem))
7131 < GET_MODE_ALIGNMENT (mode))
7132 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7133 /* If the type and the field are a constant size and the
7134 size of the type isn't the same size as the bitfield,
7135 we must use bitfield operations. */
7137 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7139 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7142 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7143 && (TYPE_ALIGN (type) > alignment
7144 || bitpos % TYPE_ALIGN (type) != 0)))
7146 enum machine_mode ext_mode = mode;
7148 if (ext_mode == BLKmode
7149 && ! (target != 0 && GET_CODE (op0) == MEM
7150 && GET_CODE (target) == MEM
7151 && bitpos % BITS_PER_UNIT == 0))
7152 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7154 if (ext_mode == BLKmode)
7156 /* In this case, BITPOS must start at a byte boundary and
7157 TARGET, if specified, must be a MEM. */
7158 if (GET_CODE (op0) != MEM
7159 || (target != 0 && GET_CODE (target) != MEM)
7160 || bitpos % BITS_PER_UNIT != 0)
7163 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7165 target = assign_temp (type, 0, 1, 1);
7167 emit_block_move (target, op0,
7168 bitsize == -1 ? expr_size (exp)
7169 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7175 op0 = validize_mem (op0);
7177 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7178 mark_reg_pointer (XEXP (op0, 0), alignment);
7180 op0 = extract_bit_field (op0, bitsize, bitpos,
7181 unsignedp, target, ext_mode, ext_mode,
7183 int_size_in_bytes (TREE_TYPE (tem)));
7185 /* If the result is a record type and BITSIZE is narrower than
7186 the mode of OP0, an integral mode, and this is a big endian
7187 machine, we must put the field into the high-order bits. */
7188 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7189 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7190 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7191 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7192 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7196 if (mode == BLKmode)
7198 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7200 rtx new = assign_temp (nt, 0, 1, 1);
7202 emit_move_insn (new, op0);
7203 op0 = copy_rtx (new);
7204 PUT_MODE (op0, BLKmode);
7210 /* If the result is BLKmode, use that to access the object
7212 if (mode == BLKmode)
7215 /* Get a reference to just this component. */
7216 if (modifier == EXPAND_CONST_ADDRESS
7217 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7218 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7220 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7222 if (op0 == orig_op0)
7223 op0 = copy_rtx (op0);
7225 set_mem_attributes (op0, exp, 0);
7226 if (GET_CODE (XEXP (op0, 0)) == REG)
7227 mark_reg_pointer (XEXP (op0, 0), alignment);
7229 MEM_VOLATILE_P (op0) |= volatilep;
7230 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7231 || modifier == EXPAND_CONST_ADDRESS
7232 || modifier == EXPAND_INITIALIZER)
7234 else if (target == 0)
7235 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7237 convert_move (target, op0, unsignedp);
7243 rtx insn, before = get_last_insn (), vtbl_ref;
7245 /* Evaluate the interior expression. */
7246 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7249 /* Get or create an instruction off which to hang a note. */
7250 if (REG_P (subtarget))
7253 insn = get_last_insn ();
7256 if (! INSN_P (insn))
7257 insn = prev_nonnote_insn (insn);
7261 target = gen_reg_rtx (GET_MODE (subtarget));
7262 insn = emit_move_insn (target, subtarget);
7265 /* Collect the data for the note. */
7266 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7267 vtbl_ref = plus_constant (vtbl_ref,
7268 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7269 /* Discard the initial CONST that was added. */
7270 vtbl_ref = XEXP (vtbl_ref, 0);
7273 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7278 /* Intended for a reference to a buffer of a file-object in Pascal.
7279 But it's not certain that a special tree code will really be
7280 necessary for these. INDIRECT_REF might work for them. */
7286 /* Pascal set IN expression.
7288 Algorithm:
7289 rlo = set_low - (set_low%bits_per_word);
7290 the_word = set [ (index - rlo)/bits_per_word ];
7291 bit_index = index % bits_per_word;
7292 bitmask = 1 << bit_index;
7293 return !!(the_word & bitmask); */
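/* Worked example with hypothetical values: for a set over [8..40] with
   bits_per_word == 8, testing index 19 gives rlo = 8,
   the_word = set[(19-8)/8] = set[1], bit_index = 19 % 8 = 3, and
   bitmask = 1 << 3.  */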
7295 tree set = TREE_OPERAND (exp, 0);
7296 tree index = TREE_OPERAND (exp, 1);
7297 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7298 tree set_type = TREE_TYPE (set);
7299 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7300 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7301 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7302 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7303 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7304 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7305 rtx setaddr = XEXP (setval, 0);
7306 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308 rtx diff, quo, rem, addr, bit, result;
7310 /* If domain is empty, answer is no. Likewise if index is constant
7311 and out of bounds. */
7312 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7313 && TREE_CODE (set_low_bound) == INTEGER_CST
7314 && tree_int_cst_lt (set_high_bound, set_low_bound))
7315 || (TREE_CODE (index) == INTEGER_CST
7316 && TREE_CODE (set_low_bound) == INTEGER_CST
7317 && tree_int_cst_lt (index, set_low_bound))
7318 || (TREE_CODE (set_high_bound) == INTEGER_CST
7319 && TREE_CODE (index) == INTEGER_CST
7320 && tree_int_cst_lt (set_high_bound, index))))
7324 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7326 /* If we get here, we have to generate the code for both cases
7327 (in range and out of range). */
7329 op0 = gen_label_rtx ();
7330 op1 = gen_label_rtx ();
7332 if (! (GET_CODE (index_val) == CONST_INT
7333 && GET_CODE (lo_r) == CONST_INT))
7335 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7336 GET_MODE (index_val), iunsignedp, 0, op1);
7339 if (! (GET_CODE (index_val) == CONST_INT
7340 && GET_CODE (hi_r) == CONST_INT))
7342 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7343 GET_MODE (index_val), iunsignedp, 0, op1);
7346 /* Calculate the element number of bit zero in the first word
7347 of the set. */
7348 if (GET_CODE (lo_r) == CONST_INT)
7349 rlow = GEN_INT (INTVAL (lo_r)
7350 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7352 rlow = expand_binop (index_mode, and_optab, lo_r,
7353 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7354 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7356 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7357 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7359 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7360 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7361 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7362 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7364 addr = memory_address (byte_mode,
7365 expand_binop (index_mode, add_optab, diff,
7366 setaddr, NULL_RTX, iunsignedp,
7369 /* Extract the bit we want to examine. */
7370 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7371 gen_rtx_MEM (byte_mode, addr),
7372 make_tree (TREE_TYPE (index), rem),
7374 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7375 GET_MODE (target) == byte_mode ? target : 0,
7376 1, OPTAB_LIB_WIDEN);
7378 if (result != target)
7379 convert_move (target, result, 1);
7381 /* Output the code to handle the out-of-range case. */
7384 emit_move_insn (target, const0_rtx);
7389 case WITH_CLEANUP_EXPR:
7390 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7392 WITH_CLEANUP_EXPR_RTL (exp)
7393 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7394 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7396 /* That's it for this cleanup. */
7397 TREE_OPERAND (exp, 1) = 0;
7399 return WITH_CLEANUP_EXPR_RTL (exp);
7401 case CLEANUP_POINT_EXPR:
7403 /* Start a new binding layer that will keep track of all cleanup
7404 actions to be performed. */
7405 expand_start_bindings (2);
7407 target_temp_slot_level = temp_slot_level;
7409 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7410 /* If we're going to use this value, load it up now. */
7412 op0 = force_not_mem (op0);
7413 preserve_temp_slots (op0);
7414 expand_end_bindings (NULL_TREE, 0, 0);
7419 /* Check for a built-in function. */
7420 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7421 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7423 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7425 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7426 == BUILT_IN_FRONTEND)
7427 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7429 return expand_builtin (exp, target, subtarget, tmode, ignore);
7432 return expand_call (exp, target, ignore);
7434 case NON_LVALUE_EXPR:
7437 case REFERENCE_EXPR:
7438 if (TREE_OPERAND (exp, 0) == error_mark_node)
7441 if (TREE_CODE (type) == UNION_TYPE)
7443 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7445 /* If both input and output are BLKmode, this conversion
7446 isn't actually doing anything unless we need to make the
7447 alignment stricter. */
7448 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7449 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7450 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7451 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7455 target = assign_temp (type, 0, 1, 1);
7457 if (GET_CODE (target) == MEM)
7458 /* Store data into beginning of memory target. */
7459 store_expr (TREE_OPERAND (exp, 0),
7460 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7462 else if (GET_CODE (target) == REG)
7463 /* Store this field into a union of the proper type. */
7464 store_field (target,
7465 MIN ((int_size_in_bytes (TREE_TYPE
7466 (TREE_OPERAND (exp, 0)))
7468 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7469 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7470 VOIDmode, 0, BITS_PER_UNIT,
7471 int_size_in_bytes (type), 0);
7475 /* Return the entire union. */
7479 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7484 /* If the signedness of the conversion differs and OP0 is
7485 a promoted SUBREG, clear that indication since we now
7486 have to do the proper extension. */
7487 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7488 && GET_CODE (op0) == SUBREG)
7489 SUBREG_PROMOTED_VAR_P (op0) = 0;
7494 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7495 if (GET_MODE (op0) == mode)
7498 /* If OP0 is a constant, just convert it into the proper mode. */
7499 if (CONSTANT_P (op0))
7501 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7502 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7504 if (modifier == EXPAND_INITIALIZER)
7505 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7509 convert_to_mode (mode, op0,
7510 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7512 convert_move (target, op0,
7513 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7517 /* We come here from MINUS_EXPR when the second operand is a
7518 constant. */
7520 this_optab = ! unsignedp && flag_trapv
7521 && (GET_MODE_CLASS(mode) == MODE_INT)
7522 ? addv_optab : add_optab;
7524 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7525 something else, make sure we add the register to the constant and
7526 then to the other thing. This case can occur during strength
7527 reduction and doing it this way will produce better code if the
7528 frame pointer or argument pointer is eliminated.
7530 fold-const.c will ensure that the constant is always in the inner
7531 PLUS_EXPR, so the only case we need to do anything about is if
7532 sp, ap, or fp is our second argument, in which case we must swap
7533 the innermost first argument and our second argument. */
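/* E.g., (x + 4) + fp is rewritten as (fp + 4) + x, so that once fp is
   eliminated the register-plus-constant part folds into a single
   address.  */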
7535 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7536 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7537 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7538 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7539 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7540 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7542 tree t = TREE_OPERAND (exp, 1);
7544 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7545 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7548 /* If the result is to be ptr_mode and we are adding an integer to
7549 something, we might be forming a constant. So try to use
7550 plus_constant. If it produces a sum and we can't accept it,
7551 use force_operand. This allows P = &ARR[const] to generate
7552 efficient code on machines where a SYMBOL_REF is not a valid
7553 address.
7555 If this is an EXPAND_SUM call, always return the sum. */
7556 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7557 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7559 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7560 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7561 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7565 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7567 /* Use immed_double_const to ensure that the constant is
7568 truncated according to the mode of OP1, then sign extended
7569 to a HOST_WIDE_INT. Using the constant directly can result
7570 in non-canonical RTL in a 64x32 cross compile. */
7572 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7574 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7575 op1 = plus_constant (op1, INTVAL (constant_part));
7576 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7577 op1 = force_operand (op1, target);
7581 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7582 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7583 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7589 if (! CONSTANT_P (op0))
7591 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7592 VOIDmode, modifier);
7593 /* Don't go to both_summands if modifier
7594 says it's not right to return a PLUS. */
7595 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7596 goto binop2;
7597 goto both_summands;
7599 /* Use immed_double_const to ensure that the constant is
7600 truncated according to the mode of OP0, then sign extended
7601 to a HOST_WIDE_INT. Using the constant directly can result
7602 in non-canonical RTL in a 64x32 cross compile. */
7604 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7606 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7607 op0 = plus_constant (op0, INTVAL (constant_part));
7608 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7609 op0 = force_operand (op0, target);
7614 /* No sense saving up arithmetic to be done
7615 if it's all in the wrong mode to form part of an address.
7616 And force_operand won't know whether to sign-extend or
7617 zero-extend. */
7618 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7619 || mode != ptr_mode)
7622 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7625 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7626 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7628 both_summands:
7629 /* Make sure any term that's a sum with a constant comes last. */
7630 if (GET_CODE (op0) == PLUS
7631 && CONSTANT_P (XEXP (op0, 1)))
7637 /* If adding to a sum including a constant,
7638 associate it to put the constant outside. */
7639 if (GET_CODE (op1) == PLUS
7640 && CONSTANT_P (XEXP (op1, 1)))
7642 rtx constant_term = const0_rtx;
7644 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7647 /* Ensure that MULT comes first if there is one. */
7648 else if (GET_CODE (op0) == MULT)
7649 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7651 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7653 /* Let's also eliminate constants from op0 if possible. */
7654 op0 = eliminate_constant_term (op0, &constant_term);
7656 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7657 their sum should be a constant. Form it into OP1, since the
7658 result we want will then be OP0 + OP1. */
7660 temp = simplify_binary_operation (PLUS, mode, constant_term,
7665 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7668 /* Put a constant term last and put a multiplication first. */
7669 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7670 temp = op1, op1 = op0, op0 = temp;
7672 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7673 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7676 /* For initializers, we are allowed to return a MINUS of two
7677 symbolic constants. Here we handle all cases when both operands
7678 are constant. */
7679 /* Handle difference of two symbolic constants,
7680 for the sake of an initializer. */
7681 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7682 && really_constant_p (TREE_OPERAND (exp, 0))
7683 && really_constant_p (TREE_OPERAND (exp, 1)))
7685 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7686 VOIDmode, ro_modifier);
7687 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7688 VOIDmode, ro_modifier);
7690 /* If the last operand is a CONST_INT, use plus_constant of
7691 the negated constant. Else make the MINUS. */
7692 if (GET_CODE (op1) == CONST_INT)
7693 return plus_constant (op0, - INTVAL (op1));
7695 return gen_rtx_MINUS (mode, op0, op1);
7697 /* Convert A - const to A + (-const). */
7698 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7700 tree negated = fold (build1 (NEGATE_EXPR, type,
7701 TREE_OPERAND (exp, 1)));
7703 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7704 /* If we can't negate the constant in TYPE, leave it alone and
7705 expand_binop will negate it for us. We used to try to do it
7706 here in the signed version of TYPE, but that doesn't work
7707 on POINTER_TYPEs. */;
7710 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
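/* E.g., x - 5 is rewritten as x + (-5) so the PLUS_EXPR code above can
   use plus_constant and the EXPAND_SUM machinery.  */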
7714 this_optab = ! unsignedp && flag_trapv
7715 && (GET_MODE_CLASS(mode) == MODE_INT)
7716 ? subv_optab : sub_optab;
7720 /* If first operand is constant, swap them.
7721 Thus the following special case checks need only
7722 check the second operand. */
7723 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7725 tree t1 = TREE_OPERAND (exp, 0);
7726 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7727 TREE_OPERAND (exp, 1) = t1;
7730 /* Attempt to return something suitable for generating an
7731 indexed address, for machines that support that. */
7733 if (modifier == EXPAND_SUM && mode == ptr_mode
7734 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7735 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7737 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7740 /* Apply distributive law if OP0 is x+c. */
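/* Illustrative: if OP0 expanded to (plus X 8) and the multiplier is 4,
   the result is (plus (mult X 4) 32); keeping the constant outermost
   lets it combine into an indexed address.  */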
7741 if (GET_CODE (op0) == PLUS
7742 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7747 (mode, XEXP (op0, 0),
7748 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7749 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7750 * INTVAL (XEXP (op0, 1))));
7752 if (GET_CODE (op0) != REG)
7753 op0 = force_operand (op0, NULL_RTX);
7754 if (GET_CODE (op0) != REG)
7755 op0 = copy_to_mode_reg (mode, op0);
7758 gen_rtx_MULT (mode, op0,
7759 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7762 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7765 /* Check for multiplying things that have been extended
7766 from a narrower type. If this machine supports multiplying
7767 in that narrower type with a result in the desired type,
7768 do it that way, and avoid the explicit type-conversion. */
7769 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7770 && TREE_CODE (type) == INTEGER_TYPE
7771 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7772 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7773 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7774 && int_fits_type_p (TREE_OPERAND (exp, 1),
7775 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7776 /* Don't use a widening multiply if a shift will do. */
7777 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7778 > HOST_BITS_PER_WIDE_INT)
7779 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7781 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7782 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7784 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7785 /* If both operands are extended, they must either both
7786 be zero-extended or both be sign-extended. */
7787 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7789 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7791 enum machine_mode innermode
7792 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7793 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7794 ? smul_widen_optab : umul_widen_optab);
7795 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7796 ? umul_widen_optab : smul_widen_optab);
7797 if (mode == GET_MODE_WIDER_MODE (innermode))
7799 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7801 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7802 NULL_RTX, VOIDmode, 0);
7803 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7804 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7807 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7808 NULL_RTX, VOIDmode, 0);
7811 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7812 && innermode == word_mode)
7815 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7816 NULL_RTX, VOIDmode, 0);
7817 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7818 op1 = convert_modes (innermode, mode,
7819 expand_expr (TREE_OPERAND (exp, 1),
7820 NULL_RTX, VOIDmode, 0),
7823 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7824 NULL_RTX, VOIDmode, 0);
7825 temp = expand_binop (mode, other_optab, op0, op1, target,
7826 unsignedp, OPTAB_LIB_WIDEN);
7827 htem = expand_mult_highpart_adjust (innermode,
7828 gen_highpart (innermode, temp),
7830 gen_highpart (innermode, temp),
7832 emit_move_insn (gen_highpart (innermode, temp), htem);
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7838 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7839 return expand_mult (mode, op0, op1, target, unsignedp);
7841 case TRUNC_DIV_EXPR:
7842 case FLOOR_DIV_EXPR:
7844 case ROUND_DIV_EXPR:
7845 case EXACT_DIV_EXPR:
7846 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7848 /* Possible optimization: compute the dividend with EXPAND_SUM
7849 then, if the divisor is constant, optimize the case where some
7850 terms of the dividend have coefficients divisible by it. */
7851 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7852 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7853 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7856 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7857 saving an expensive divide. If not, combine will rebuild the original
7858 computation. */
7859 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7860 && !real_onep (TREE_OPERAND (exp, 0)))
7861 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7862 build (RDIV_EXPR, type,
7863 build_real (type, dconst1),
7864 TREE_OPERAND (exp, 1))),
7865 target, tmode, unsignedp);
7866 this_optab = sdiv_optab;
7869 case TRUNC_MOD_EXPR:
7870 case FLOOR_MOD_EXPR:
7872 case ROUND_MOD_EXPR:
7873 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7876 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7877 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7879 case FIX_ROUND_EXPR:
7880 case FIX_FLOOR_EXPR:
7882 abort (); /* Not used for C. */
7884 case FIX_TRUNC_EXPR:
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7887 target = gen_reg_rtx (mode);
7888 expand_fix (target, op0, unsignedp);
7892 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7894 target = gen_reg_rtx (mode);
7895 /* expand_float can't figure out what to do if FROM has VOIDmode.
7896 So give it the correct mode. With -O, cse will optimize this. */
7897 if (GET_MODE (op0) == VOIDmode)
7898 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7900 expand_float (target, op0,
7901 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7906 temp = expand_unop (mode,
7907 ! unsignedp && flag_trapv
7908 && (GET_MODE_CLASS(mode) == MODE_INT)
7909 ? negv_optab : neg_optab, op0, target, 0);
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7917 /* Handle complex values specially. */
7918 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7919 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7920 return expand_complex_abs (mode, op0, target, unsignedp);
7922 /* Unsigned abs is simply the operand. Testing here means we don't
7923 risk generating incorrect code below. */
7924 if (TREE_UNSIGNED (type))
7927 return expand_abs (mode, op0, target, unsignedp,
7928 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7932 target = original_target;
7933 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7934 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7935 || GET_MODE (target) != mode
7936 || (GET_CODE (target) == REG
7937 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7938 target = gen_reg_rtx (mode);
7939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7940 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7942 /* First try to do it with a special MIN or MAX instruction.
7943 If that does not win, use a conditional jump to select the proper
7944 value. */
7945 this_optab = (TREE_UNSIGNED (type)
7946 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7947 : (code == MIN_EXPR ? smin_optab : smax_optab));
7949 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7954 /* At this point, a MEM target is no longer useful; we will get better
7955 code without it. */
7957 if (GET_CODE (target) == MEM)
7958 target = gen_reg_rtx (mode);
7961 emit_move_insn (target, op0);
7963 op0 = gen_label_rtx ();
7965 /* If this mode is an integer too wide to compare properly,
7966 compare word by word. Rely on cse to optimize constant cases. */
7967 if (GET_MODE_CLASS (mode) == MODE_INT
7968 && ! can_compare_p (GE, mode, ccp_jump))
7970 if (code == MAX_EXPR)
7971 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7972 target, op1, NULL_RTX, op0);
7974 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7975 op1, target, NULL_RTX, op0);
7979 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7980 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7981 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7984 emit_move_insn (target, op1);
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7990 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7997 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8002 /* ??? Can optimize bitwise operations with one arg constant.
8003 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8004 and (a bitwise1 b) bitwise2 b (etc)
8005 but that is probably not worth while. */
8007 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8008 boolean values when we want in all cases to compute both of them. In
8009 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8010 as actual zero-or-1 values and then bitwise anding. In cases where
8011 there cannot be any side effects, better code would be made by
8012 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8013 how to recognize those cases. */
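/* E.g. (illustrative): (a != 0) & (b != 0) computes both flags as 0/1
   values and ANDs them with no branches, whereas the short-circuit
   TRUTH_ANDIF_EXPR form handled below must jump around its second
   operand.  */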
8015 case TRUTH_AND_EXPR:
8017 this_optab = and_optab;
8022 this_optab = ior_optab;
8025 case TRUTH_XOR_EXPR:
8027 this_optab = xor_optab;
8034 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8037 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8040 /* Could determine the answer when only additive constants differ. Also,
8041 the addition of one can be handled by changing the condition. */
8048 case UNORDERED_EXPR:
8055 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8059 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8060 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8062 && GET_CODE (original_target) == REG
8063 && (GET_MODE (original_target)
8064 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8066 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8069 if (temp != original_target)
8070 temp = copy_to_reg (temp);
8072 op1 = gen_label_rtx ();
8073 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8074 GET_MODE (temp), unsignedp, 0, op1);
8075 emit_move_insn (temp, const1_rtx);
8080 /* If no set-flag instruction, must generate a conditional
8081 store into a temporary variable. Drop through
8082 and handle this like && and ||. */
8084 case TRUTH_ANDIF_EXPR:
8085 case TRUTH_ORIF_EXPR:
8086 if (! ignore
8087 && (target == 0 || ! safe_from_p (target, exp, 1)
8088 /* Make sure we don't have a hard reg (such as function's return
8089 value) live across basic blocks, if not optimizing. */
8090 || (!optimize && GET_CODE (target) == REG
8091 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8092 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8095 emit_clr_insn (target);
8097 op1 = gen_label_rtx ();
8098 jumpifnot (exp, op1);
8101 emit_0_to_1_insn (target);
8104 return ignore ? const0_rtx : target;
8106 case TRUTH_NOT_EXPR:
8107 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8108 /* The parser is careful to generate TRUTH_NOT_EXPR
8109 only with operands that are always zero or one. */
8110 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8111 target, 1, OPTAB_LIB_WIDEN);
8117 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8119 return expand_expr (TREE_OPERAND (exp, 1),
8120 (ignore ? const0_rtx : target),
8124 /* If we would have a "singleton" (see below) were it not for a
8125 conversion in each arm, bring that conversion back out. */
8126 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8127 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8128 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8129 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8131 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8132 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8134 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8135 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8136 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8137 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8138 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8139 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8140 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8141 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8142 return expand_expr (build1 (NOP_EXPR, type,
8143 build (COND_EXPR, TREE_TYPE (iftrue),
8144 TREE_OPERAND (exp, 0),
8146 target, tmode, modifier);
8150 /* Note that COND_EXPRs whose type is a structure or union
8151 are required to be constructed to contain assignments of
8152 a temporary variable, so that we can evaluate them here
8153 for side effect only. If type is void, we must do likewise. */
8155 /* If an arm of the branch requires a cleanup,
8156 only that cleanup is performed. */
8159 tree binary_op = 0, unary_op = 0;
8161 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8162 convert it to our mode, if necessary. */
8163 if (integer_onep (TREE_OPERAND (exp, 1))
8164 && integer_zerop (TREE_OPERAND (exp, 2))
8165 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8169 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8174 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8175 if (GET_MODE (op0) == mode)
8179 target = gen_reg_rtx (mode);
8180 convert_move (target, op0, unsignedp);
8184 /* Check for X ? A + B : A. If we have this, we can copy A to the
8185 output and conditionally add B. Similarly for unary operations.
8186 Don't do this if X has side-effects because those side effects
8187 might affect A or B and the "?" operation is a sequence point in
8188 ANSI. (operand_equal_p tests for side effects.) */
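/* Illustration (hypothetical names): for `x ? a + b : a', A is the
   "singleton"; we can store a into the target unconditionally and then
   add b into it only on the path where x is true, instead of evaluating
   a in both arms.  */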
8190 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8191 && operand_equal_p (TREE_OPERAND (exp, 2),
8192 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8193 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8194 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8195 && operand_equal_p (TREE_OPERAND (exp, 1),
8196 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8197 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8198 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8199 && operand_equal_p (TREE_OPERAND (exp, 2),
8200 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8201 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8202 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8203 && operand_equal_p (TREE_OPERAND (exp, 1),
8204 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8205 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8207 /* If we are not to produce a result, we have no target. Otherwise,
8208 if a target was specified use it; it will not be used as an
8209 intermediate target unless it is safe.  If no target, use a temporary.  */
8214 else if (original_target
8215 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8216 || (singleton && GET_CODE (original_target) == REG
8217 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8218 && original_target == var_rtx (singleton)))
8219 && GET_MODE (original_target) == mode
8220 #ifdef HAVE_conditional_move
8221 && (! can_conditionally_move_p (mode)
8222 || GET_CODE (original_target) == REG
8223 || TREE_ADDRESSABLE (type))
8224 #endif
8225 && (GET_CODE (original_target) != MEM
8226 || TREE_ADDRESSABLE (type)))
8227 temp = original_target;
8228 else if (TREE_ADDRESSABLE (type))
8231 temp = assign_temp (type, 0, 0, 1);
8233 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8234 do the test of X as a store-flag operation, do this as
8235 A + ((X != 0) << log C). Similarly for other simple binary
8236 operators. Only do for C == 1 if BRANCH_COST is low. */
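/* For instance (an illustrative sketch): `x > y ? a + 4 : a' can be
   emitted as a + ((x > y) << 2); the store-flag result of `x > y' is 0
   or 1, so the shift yields 0 or 4 without any branch.  */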
8237 if (temp && singleton && binary_op
8238 && (TREE_CODE (binary_op) == PLUS_EXPR
8239 || TREE_CODE (binary_op) == MINUS_EXPR
8240 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8241 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8242 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8243 : integer_onep (TREE_OPERAND (binary_op, 1)))
8244 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8247 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8248 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8249 ? addv_optab : add_optab)
8250 : TREE_CODE (binary_op) == MINUS_EXPR
8251 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8252 ? subv_optab : sub_optab)
8253 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8256 /* If we had X ? A : A + 1, do this as A + (X == 0).
8258 We have to invert the truth value here and then put it
8259 back later if do_store_flag fails. We cannot simply copy
8260 TREE_OPERAND (exp, 0) to another variable and modify that
8261 because invert_truthvalue can modify the tree pointed to by its argument.  */
8263 if (singleton == TREE_OPERAND (exp, 1))
8264 TREE_OPERAND (exp, 0)
8265 = invert_truthvalue (TREE_OPERAND (exp, 0));
8267 result = do_store_flag (TREE_OPERAND (exp, 0),
8268 (safe_from_p (temp, singleton, 1)
8270 mode, BRANCH_COST <= 1);
8272 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8273 result = expand_shift (LSHIFT_EXPR, mode, result,
8274 build_int_2 (tree_log2
8278 (safe_from_p (temp, singleton, 1)
8279 ? temp : NULL_RTX), 0);
8283 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8284 return expand_binop (mode, boptab, op1, result, temp,
8285 unsignedp, OPTAB_LIB_WIDEN);
8287 else if (singleton == TREE_OPERAND (exp, 1))
8288 TREE_OPERAND (exp, 0)
8289 = invert_truthvalue (TREE_OPERAND (exp, 0));
8292 do_pending_stack_adjust ();
8294 op0 = gen_label_rtx ();
8296 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8300 /* If the target conflicts with the other operand of the
8301 binary op, we can't use it. Also, we can't use the target
8302 if it is a hard register, because evaluating the condition
8303 might clobber it. */
8305 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8306 || (GET_CODE (temp) == REG
8307 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8308 temp = gen_reg_rtx (mode);
8309 store_expr (singleton, temp, 0);
8312 expand_expr (singleton,
8313 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8314 if (singleton == TREE_OPERAND (exp, 1))
8315 jumpif (TREE_OPERAND (exp, 0), op0);
8317 jumpifnot (TREE_OPERAND (exp, 0), op0);
8319 start_cleanup_deferral ();
8320 if (binary_op && temp == 0)
8321 /* Just touch the other operand. */
8322 expand_expr (TREE_OPERAND (binary_op, 1),
8323 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8325 store_expr (build (TREE_CODE (binary_op), type,
8326 make_tree (type, temp),
8327 TREE_OPERAND (binary_op, 1)),
8330 store_expr (build1 (TREE_CODE (unary_op), type,
8331 make_tree (type, temp)),
8335 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8336 comparison operator. If we have one of these cases, set the
8337 output to A, branch on A (cse will merge these two references),
8338 then set the output to FOO. */
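/* Example (hypothetical names): for `a > 0 ? a : foo' we store a into
   the target, branch on the same `a > 0' test (cse can merge the two
   references to a), and store foo on the false path.  */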
8340 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8341 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8342 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8343 TREE_OPERAND (exp, 1), 0)
8344 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8345 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8346 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8348 if (GET_CODE (temp) == REG
8349 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8350 temp = gen_reg_rtx (mode);
8351 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8352 jumpif (TREE_OPERAND (exp, 0), op0);
8354 start_cleanup_deferral ();
8355 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8359 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8360 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8361 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8362 TREE_OPERAND (exp, 2), 0)
8363 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8364 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8365 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8367 if (GET_CODE (temp) == REG
8368 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8369 temp = gen_reg_rtx (mode);
8370 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8371 jumpifnot (TREE_OPERAND (exp, 0), op0);
8373 start_cleanup_deferral ();
8374 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8379 op1 = gen_label_rtx ();
8380 jumpifnot (TREE_OPERAND (exp, 0), op0);
8382 start_cleanup_deferral ();
8384 /* One branch of the cond can be void, if it never returns. For
8385 example A ? throw : E.  */
8387 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8388 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8390 expand_expr (TREE_OPERAND (exp, 1),
8391 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8392 end_cleanup_deferral ();
8394 emit_jump_insn (gen_jump (op1));
8397 start_cleanup_deferral ();
8399 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8400 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8402 expand_expr (TREE_OPERAND (exp, 2),
8403 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8406 end_cleanup_deferral ();
8417 /* Something needs to be initialized, but we didn't know
8418 where that thing was when building the tree. For example,
8419 it could be the return value of a function, or a parameter
8420 to a function which is laid out on the stack, or a temporary
8421 variable which must be passed by reference.
8423 We guarantee that the expression will either be constructed
8424 or copied into our original target. */
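/* A typical instance, for illustration only: in C++, for
   `struct S s = make_s ();' with S returned in memory, the TARGET_EXPR
   lets make_s () construct its result directly in the slot for s,
   avoiding a temporary and a structure copy.  */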
8426 tree slot = TREE_OPERAND (exp, 0);
8427 tree cleanups = NULL_TREE;
8430 if (TREE_CODE (slot) != VAR_DECL)
8434 target = original_target;
8436 /* Set this here so that if we get a target that refers to a
8437 register variable that's already been used, put_reg_into_stack
8438 knows that it should fix up those uses. */
8439 TREE_USED (slot) = 1;
8443 if (DECL_RTL_SET_P (slot))
8445 target = DECL_RTL (slot);
8446 /* If we have already expanded the slot, don't do it again.  */
8448 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8453 target = assign_temp (type, 2, 0, 1);
8454 /* All temp slots at this level must not conflict. */
8455 preserve_temp_slots (target);
8456 SET_DECL_RTL (slot, target);
8457 if (TREE_ADDRESSABLE (slot))
8458 put_var_into_stack (slot);
8460 /* Since SLOT is not known to the called function
8461 to belong to its stack frame, we must build an explicit
8462 cleanup. This case occurs when we must build up a reference
8463 to pass the reference as an argument. In this case,
8464 it is very likely that such a reference need not be
8467 if (TREE_OPERAND (exp, 2) == 0)
8468 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8469 cleanups = TREE_OPERAND (exp, 2);
8474 /* This case does occur, when expanding a parameter which
8475 needs to be constructed on the stack. The target
8476 is the actual stack address that we want to initialize.
8477 The function we call will perform the cleanup in this case. */
8479 /* If we have already assigned it space, use that space,
8480 not the target that we were passed in, as our target
8481 parameter is only a hint. */
8482 if (DECL_RTL_SET_P (slot))
8484 target = DECL_RTL (slot);
8485 /* If we have already expanded the slot, don't do it again.  */
8487 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8492 SET_DECL_RTL (slot, target);
8493 /* If we must have an addressable slot, then make sure that
8494 the RTL that we just stored in slot is OK. */
8495 if (TREE_ADDRESSABLE (slot))
8496 put_var_into_stack (slot);
8500 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8501 /* Mark it as expanded. */
8502 TREE_OPERAND (exp, 1) = NULL_TREE;
8504 store_expr (exp1, target, 0);
8506 expand_decl_cleanup (NULL_TREE, cleanups);
8513 tree lhs = TREE_OPERAND (exp, 0);
8514 tree rhs = TREE_OPERAND (exp, 1);
8516 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8522 /* If lhs is complex, expand calls in rhs before computing it.
8523 That's so we don't compute a pointer and save it over a
8524 call. If lhs is simple, compute it first so we can give it
8525 as a target if the rhs is just a call. This avoids an
8526 extra temp and copy and that prevents a partial-subsumption
8527 which makes bad code. Actually we could treat
8528 component_ref's of vars like vars. */
8530 tree lhs = TREE_OPERAND (exp, 0);
8531 tree rhs = TREE_OPERAND (exp, 1);
8535 /* Check for |= or &= of a bitfield of size one into another bitfield
8536 of size one.  In this case, (unless we need the result of the
8537 assignment) we can do this more efficiently with a
8538 test followed by an assignment, if necessary.
8540 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8541 things change so we do, this code should be enhanced to support it.  */
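/* Sketch of the transformation (illustrative field names): for
   `s.a |= s.b' with two one-bit fields, we branch on s.b and store 1
   into s.a only when s.b is set; no mask arithmetic on the containing
   words is needed.  */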
8544 && TREE_CODE (lhs) == COMPONENT_REF
8545 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8546 || TREE_CODE (rhs) == BIT_AND_EXPR)
8547 && TREE_OPERAND (rhs, 0) == lhs
8548 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8549 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8550 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8552 rtx label = gen_label_rtx ();
8554 do_jump (TREE_OPERAND (rhs, 1),
8555 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8556 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8557 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8558 (TREE_CODE (rhs) == BIT_IOR_EXPR
8560 : integer_zero_node)),
8562 do_pending_stack_adjust ();
8567 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8573 if (!TREE_OPERAND (exp, 0))
8574 expand_null_return ();
8576 expand_return (TREE_OPERAND (exp, 0));
8579 case PREINCREMENT_EXPR:
8580 case PREDECREMENT_EXPR:
8581 return expand_increment (exp, 0, ignore);
8583 case POSTINCREMENT_EXPR:
8584 case POSTDECREMENT_EXPR:
8585 /* Faster to treat as pre-increment if result is not used. */
8586 return expand_increment (exp, ! ignore, ignore);
8589 /* If nonzero, TEMP will be set to the address of something that might
8590 be a MEM corresponding to a stack slot. */
8593 /* Are we taking the address of a nested function? */
8594 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8595 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8596 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8597 && ! TREE_STATIC (exp))
8599 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8600 op0 = force_operand (op0, target);
8602 /* If we are taking the address of something erroneous, just use zero.  */
8604 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8608 /* We make sure to pass const0_rtx down if we came in with
8609 ignore set, to avoid doing the cleanups twice for something. */
8610 op0 = expand_expr (TREE_OPERAND (exp, 0),
8611 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8612 (modifier == EXPAND_INITIALIZER
8613 ? modifier : EXPAND_CONST_ADDRESS));
8615 /* If we are going to ignore the result, OP0 will have been set
8616 to const0_rtx, so just return it. Don't get confused and
8617 think we are taking the address of the constant. */
8621 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8622 clever and return a REG when given a MEM.  */
8623 op0 = protect_from_queue (op0, 1);
8625 /* We would like the object in memory. If it is a constant, we can
8626 have it be statically allocated into memory. For a non-constant,
8627 we need to allocate some memory and store the value into it. */
8629 if (CONSTANT_P (op0))
8630 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8632 else if (GET_CODE (op0) == MEM)
8634 mark_temp_addr_taken (op0);
8635 temp = XEXP (op0, 0);
8638 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8639 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8640 || GET_CODE (op0) == PARALLEL)
8642 /* If this object is in a register, copy it into a memory temporary; we need it in memory in order to take its address.  */
8644 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8645 tree nt = build_qualified_type (inner_type,
8646 (TYPE_QUALS (inner_type)
8647 | TYPE_QUAL_CONST));
8648 rtx memloc = assign_temp (nt, 1, 1, 1);
8650 mark_temp_addr_taken (memloc);
8651 if (GET_CODE (op0) == PARALLEL)
8652 /* Handle calls that pass values in multiple non-contiguous
8653 locations. The Irix 6 ABI has examples of this. */
8654 emit_group_store (memloc, op0,
8655 int_size_in_bytes (inner_type),
8656 TYPE_ALIGN (inner_type));
8658 emit_move_insn (memloc, op0);
8662 if (GET_CODE (op0) != MEM)
8665 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8667 temp = XEXP (op0, 0);
8668 #ifdef POINTERS_EXTEND_UNSIGNED
8669 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8670 && mode == ptr_mode)
8671 temp = convert_memory_address (ptr_mode, temp);
8672 #endif
8676 op0 = force_operand (XEXP (op0, 0), target);
8679 if (flag_force_addr && GET_CODE (op0) != REG)
8680 op0 = force_reg (Pmode, op0);
8682 if (GET_CODE (op0) == REG
8683 && ! REG_USERVAR_P (op0))
8684 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8686 /* If we might have had a temp slot, add an equivalent address for it.  */
8687 if (temp != 0)
8689 update_temp_slot_address (temp, op0);
8691 #ifdef POINTERS_EXTEND_UNSIGNED
8692 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8693 && mode == ptr_mode)
8694 op0 = convert_memory_address (ptr_mode, op0);
8695 #endif
8697 return op0;
8699 case ENTRY_VALUE_EXPR:
8702 /* COMPLEX type for Extended Pascal & Fortran */
8705 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8708 /* Get the rtx code of the operands. */
8709 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8710 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8713 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8717 /* Move the real (op0) and imaginary (op1) parts to their location. */
8718 emit_move_insn (gen_realpart (mode, target), op0);
8719 emit_move_insn (gen_imagpart (mode, target), op1);
8721 insns = get_insns ();
8724 /* Complex construction should appear as a single unit. */
8725 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8726 each with a separate pseudo as destination.
8727 It's not correct for flow to treat them as a unit. */
8728 if (GET_CODE (target) != CONCAT)
8729 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8737 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8738 return gen_realpart (mode, op0);
8741 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8742 return gen_imagpart (mode, op0);
8746 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8750 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8753 target = gen_reg_rtx (mode);
8757 /* Store the realpart and the negated imagpart to target. */
8758 emit_move_insn (gen_realpart (partmode, target),
8759 gen_realpart (partmode, op0));
8761 imag_t = gen_imagpart (partmode, target);
8762 temp = expand_unop (partmode,
8763 ! unsignedp && flag_trapv
8764 && (GET_MODE_CLASS(partmode) == MODE_INT)
8765 ? negv_optab : neg_optab,
8766 gen_imagpart (partmode, op0), imag_t, 0);
8768 emit_move_insn (imag_t, temp);
8770 insns = get_insns ();
8773 /* Conjugate should appear as a single unit.
8774 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8775 each with a separate pseudo as destination.
8776 It's not correct for flow to treat them as a unit. */
8777 if (GET_CODE (target) != CONCAT)
8778 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8785 case TRY_CATCH_EXPR:
8787 tree handler = TREE_OPERAND (exp, 1);
8789 expand_eh_region_start ();
8791 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8793 expand_eh_region_end_cleanup (handler);
8798 case TRY_FINALLY_EXPR:
8800 tree try_block = TREE_OPERAND (exp, 0);
8801 tree finally_block = TREE_OPERAND (exp, 1);
8802 rtx finally_label = gen_label_rtx ();
8803 rtx done_label = gen_label_rtx ();
8804 rtx return_link = gen_reg_rtx (Pmode);
8805 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8806 (tree) finally_label, (tree) return_link);
8807 TREE_SIDE_EFFECTS (cleanup) = 1;
8809 /* Start a new binding layer that will keep track of all cleanup
8810 actions to be performed. */
8811 expand_start_bindings (2);
8813 target_temp_slot_level = temp_slot_level;
8815 expand_decl_cleanup (NULL_TREE, cleanup);
8816 op0 = expand_expr (try_block, target, tmode, modifier);
8818 preserve_temp_slots (op0);
8819 expand_end_bindings (NULL_TREE, 0, 0);
8820 emit_jump (done_label);
8821 emit_label (finally_label);
8822 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8823 emit_indirect_jump (return_link);
8824 emit_label (done_label);
8828 case GOTO_SUBROUTINE_EXPR:
8830 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8831 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8832 rtx return_address = gen_label_rtx ();
8833 emit_move_insn (return_link,
8834 gen_rtx_LABEL_REF (Pmode, return_address));
8836 emit_label (return_address);
8841 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8844 return get_exception_pointer (cfun);
8847 /* Function descriptors are not valid except as
8848 initialization constants, and should not be expanded. */
8852 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8855 /* Here to do an ordinary binary operator, generating an instruction
8856 from the optab already placed in `this_optab'. */
8858 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8861 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8863 temp = expand_binop (mode, this_optab, op0, op1, target,
8864 unsignedp, OPTAB_LIB_WIDEN);
8870 /* Similar to expand_expr, except that we don't specify a target, target
8871 mode, or modifier and we return the alignment of the inner type. This is
8872 used in cases where it is not necessary to align the result to the
8873 alignment of its type as long as we know the alignment of the result, for
8874 example for comparisons of BLKmode values. */
8877 expand_expr_unaligned (exp, palign)
8879 unsigned int *palign;
8882 tree type = TREE_TYPE (exp);
8883 enum machine_mode mode = TYPE_MODE (type);
8885 /* Default the alignment we return to that of the type. */
8886 *palign = TYPE_ALIGN (type);
8888 /* The only case in which we do anything special is when the resulting mode is BLKmode.  */
8890 if (mode != BLKmode)
8891 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8893 switch (TREE_CODE (exp))
8897 case NON_LVALUE_EXPR:
8898 /* Conversions between BLKmode values don't change the underlying
8899 alignment or value. */
8900 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8901 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8905 /* Much of the code for this case is copied directly from expand_expr.
8906 We need to duplicate it here because we will do something different
8907 in the fall-through case, so we need to handle the same exceptions
8910 tree array = TREE_OPERAND (exp, 0);
8911 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8912 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8913 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8916 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8919 /* Optimize the special case of a zero lower bound.
8921 We convert the low_bound to sizetype to avoid some problems
8922 with constant folding. (E.g. suppose the lower bound is 1,
8923 and its mode is QI. Without the conversion, (ARRAY
8924 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8925 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8927 if (! integer_zerop (low_bound))
8928 index = size_diffop (index, convert (sizetype, low_bound));
8930 /* If this is a constant index into a constant array,
8931 just get the value from the array. Handle both the cases when
8932 we have an explicit constructor and when our operand is a variable
8933 that was declared const. */
8935 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8936 && host_integerp (index, 0)
8937 && 0 > compare_tree_int (index,
8938 list_length (CONSTRUCTOR_ELTS
8939 (TREE_OPERAND (exp, 0)))))
8943 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8944 i = tree_low_cst (index, 0);
8945 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8949 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8952 else if (optimize >= 1
8953 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8954 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8955 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8957 if (TREE_CODE (index) == INTEGER_CST)
8959 tree init = DECL_INITIAL (array);
8961 if (TREE_CODE (init) == CONSTRUCTOR)
8965 for (elem = CONSTRUCTOR_ELTS (init);
8966 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8967 elem = TREE_CHAIN (elem))
8971 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8981 case ARRAY_RANGE_REF:
8982 /* If the operand is a CONSTRUCTOR, we can just extract the
8983 appropriate field if it is present. Don't do this if we have
8984 already written the data since we want to refer to that copy
8985 and varasm.c assumes that's what we'll do. */
8986 if (TREE_CODE (exp) == COMPONENT_REF
8987 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8988 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8992 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8993 elt = TREE_CHAIN (elt))
8994 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8995 /* Note that unlike the case in expand_expr, we know this is
8996 BLKmode and hence not an integer. */
8997 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9001 enum machine_mode mode1;
9002 HOST_WIDE_INT bitsize, bitpos;
9005 unsigned int alignment;
9007 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9008 &mode1, &unsignedp, &volatilep,
9011 /* If we got back the original object, something is wrong. Perhaps
9012 we are evaluating an expression too early. In any event, don't
9013 infinitely recurse. */
9017 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9019 /* If this is a constant, put it into a register if it is a
9020 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
9021 if (CONSTANT_P (op0))
9023 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9025 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9027 op0 = force_reg (inner_mode, op0);
9029 op0 = validize_mem (force_const_mem (inner_mode, op0));
9034 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9036 /* If this object is in a register, put it into memory.
9037 This case can't occur in C, but can in Ada if we have
9038 unchecked conversion of an expression from a scalar type to
9039 an array or record type. */
9040 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9041 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9043 tree nt = build_qualified_type (TREE_TYPE (tem),
9044 (TYPE_QUALS (TREE_TYPE (tem))
9045 | TYPE_QUAL_CONST));
9046 rtx memloc = assign_temp (nt, 1, 1, 1);
9048 mark_temp_addr_taken (memloc);
9049 emit_move_insn (memloc, op0);
9053 if (GET_CODE (op0) != MEM)
9056 if (GET_MODE (offset_rtx) != ptr_mode)
9057 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9059 #ifdef POINTERS_EXTEND_UNSIGNED
9060 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9063 op0 = offset_address (op0, offset_rtx,
9064 highest_pow2_factor (offset));
9067 /* Don't forget about volatility even if this is a bitfield. */
9068 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9070 op0 = copy_rtx (op0);
9071 MEM_VOLATILE_P (op0) = 1;
9074 /* Check the access. */
9075 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9080 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9081 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9083 /* Check the access right of the pointer. */
9084 in_check_memory_usage = 1;
9085 if (size > BITS_PER_UNIT)
9086 emit_library_call (chkr_check_addr_libfunc,
9087 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9088 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9089 TYPE_MODE (sizetype),
9090 GEN_INT (MEMORY_USE_RO),
9091 TYPE_MODE (integer_type_node));
9092 in_check_memory_usage = 0;
9095 /* In cases where an aligned union has an unaligned object
9096 as a field, we might be extracting a BLKmode value from
9097 an integer-mode (e.g., SImode) object. Handle this case
9098 by doing the extract into an object as wide as the field
9099 (which we know to be the width of a basic mode), then
9100 storing into memory, and changing the mode to BLKmode.
9101 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9102 EXPAND_INITIALIZER), then we must not copy to a temporary. */
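/* Illustrative case (hypothetical layout): a BLKmode record packed
   inside an SImode-sized union member can be fetched by extracting the
   bits into an SImode register, storing that register into a stack
   temporary, and returning the temporary with its mode changed to
   BLKmode.  */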
9103 if (mode1 == VOIDmode
9104 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9105 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9106 && (TYPE_ALIGN (type) > alignment
9107 || bitpos % TYPE_ALIGN (type) != 0)))
9109 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9111 if (ext_mode == BLKmode)
9113 /* In this case, BITPOS must start at a byte boundary. */
9114 if (GET_CODE (op0) != MEM
9115 || bitpos % BITS_PER_UNIT != 0)
9118 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9122 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9124 rtx new = assign_temp (nt, 0, 1, 1);
9126 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9127 unsignedp, NULL_RTX, ext_mode,
9128 ext_mode, alignment,
9129 int_size_in_bytes (TREE_TYPE (tem)));
9131 /* If the result is a record type and BITSIZE is narrower than
9132 the mode of OP0, an integral mode, and this is a big endian
9133 machine, we must put the field into the high-order bits. */
9134 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9135 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9136 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9137 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9138 size_int (GET_MODE_BITSIZE
9143 emit_move_insn (new, op0);
9144 op0 = copy_rtx (new);
9145 PUT_MODE (op0, BLKmode);
9149 /* Get a reference to just this component. */
9150 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9152 set_mem_attributes (op0, exp, 0);
9154 /* Adjust the alignment in case the bit position is not
9155 a multiple of the alignment of the inner object. */
9156 while (bitpos % alignment != 0)
9157 alignment >>= 1;
9159 if (GET_CODE (XEXP (op0, 0)) == REG)
9160 mark_reg_pointer (XEXP (op0, 0), alignment);
9162 MEM_IN_STRUCT_P (op0) = 1;
9163 MEM_VOLATILE_P (op0) |= volatilep;
9165 *palign = alignment;
9174 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9177 /* Return the tree node if ARG corresponds to a string constant or zero
9178 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9179 in bytes within the string that ARG is accessing. The type of the
9180 offset will be `sizetype'. */
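/* Example, for illustration: given the tree for `"hello" + 2', this
   returns the STRING_CST for "hello" and sets *PTR_OFFSET to 2, which
   lets callers such as the string built-ins fold the access at compile
   time.  */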
9183 string_constant (arg, ptr_offset)
9189 if (TREE_CODE (arg) == ADDR_EXPR
9190 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9192 *ptr_offset = size_zero_node;
9193 return TREE_OPERAND (arg, 0);
9195 else if (TREE_CODE (arg) == PLUS_EXPR)
9197 tree arg0 = TREE_OPERAND (arg, 0);
9198 tree arg1 = TREE_OPERAND (arg, 1);
9203 if (TREE_CODE (arg0) == ADDR_EXPR
9204 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9206 *ptr_offset = convert (sizetype, arg1);
9207 return TREE_OPERAND (arg0, 0);
9209 else if (TREE_CODE (arg1) == ADDR_EXPR
9210 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9212 *ptr_offset = convert (sizetype, arg0);
9213 return TREE_OPERAND (arg1, 0);
9220 /* Expand code for a post- or pre-increment or decrement
9221 and return the RTX for the result.
9222 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9225 expand_increment (exp, post, ignore)
9231 tree incremented = TREE_OPERAND (exp, 0);
9232 optab this_optab = add_optab;
9234 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9235 int op0_is_copy = 0;
9236 int single_insn = 0;
9237 /* 1 means we can't store into OP0 directly,
9238 because it is a subreg narrower than a word,
9239 and we don't dare clobber the rest of the word. */
9242 /* Stabilize any component ref that might need to be
9243 evaluated more than once below. */
9245 || TREE_CODE (incremented) == BIT_FIELD_REF
9246 || (TREE_CODE (incremented) == COMPONENT_REF
9247 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9248 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9249 incremented = stabilize_reference (incremented);
9250 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9251 ones into save exprs so that they don't accidentally get evaluated
9252 more than once by the code below. */
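/* E.g. in C++ `++++x' is a PREINCREMENT_EXPR whose operand is another
   PREINCREMENT_EXPR; without the save_expr the inner increment could be
   re-expanded, and so performed twice, by the fallback path below.  */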
9253 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9254 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9255 incremented = save_expr (incremented);
9257 /* Compute the operands as RTX.
9258 Note whether OP0 is the actual lvalue or a copy of it:
9259 I believe it is a copy iff it is a register or subreg
9260 and insns were generated in computing it. */
9262 temp = get_last_insn ();
9263 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9265 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9266 in place but instead must do sign- or zero-extension during assignment,
9267 so we copy it into a new register and let the code below use it as a copy.
9270 Note that we can safely modify this SUBREG since it is known not to be
9271 shared (it was made by the expand_expr call above). */
9273 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9276 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9280 else if (GET_CODE (op0) == SUBREG
9281 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9283 /* We cannot increment this SUBREG in place. If we are
9284 post-incrementing, get a copy of the old value. Otherwise,
9285 just mark that we cannot increment in place. */
9287 op0 = copy_to_reg (op0);
9292 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9293 && temp != get_last_insn ());
9294 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9295 EXPAND_MEMORY_USE_BAD);
9297 /* Decide whether incrementing or decrementing. */
9298 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9299 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9300 this_optab = sub_optab;
9302 /* Convert decrement by a constant into a negative increment. */
9303 if (this_optab == sub_optab
9304 && GET_CODE (op1) == CONST_INT)
9306 op1 = GEN_INT (-INTVAL (op1));
9307 this_optab = add_optab;
9310 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9311 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9313 /* For a preincrement, see if we can do this with a single instruction. */
9316 icode = (int) this_optab->handlers[(int) mode].insn_code;
9317 if (icode != (int) CODE_FOR_nothing
9318 /* Make sure that OP0 is valid for operands 0 and 1
9319 of the insn we want to queue. */
9320 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9321 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9322 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9326 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9327 then we cannot just increment OP0. We must therefore contrive to
9328 increment the original value. Then, for postincrement, we can return
9329 OP0 since it is a copy of the old value. For preincrement, expand here
9330 unless we can do it with a single insn.
9332 Likewise if storing directly into OP0 would clobber high bits
9333 we need to preserve (bad_subreg). */
9334 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9336 /* This is the easiest way to increment the value wherever it is.
9337 Problems with multiple evaluation of INCREMENTED are prevented
9338 because either (1) it is a component_ref or preincrement,
9339 in which case it was stabilized above, or (2) it is an array_ref
9340 with constant index in an array in a register, which is
9341 safe to reevaluate. */
9342 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9343 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9344 ? MINUS_EXPR : PLUS_EXPR),
9347 TREE_OPERAND (exp, 1));
9349 while (TREE_CODE (incremented) == NOP_EXPR
9350 || TREE_CODE (incremented) == CONVERT_EXPR)
9352 newexp = convert (TREE_TYPE (incremented), newexp);
9353 incremented = TREE_OPERAND (incremented, 0);
9356 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9357 return post ? op0 : temp;
9362 /* We have a true reference to the value in OP0.
9363 If there is an insn to add or subtract in this mode, queue it.
9364 Queueing the increment insn avoids the register shuffling
9365 that often results if we must increment now and first save
9366 the old value for subsequent use. */
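/* For example (a sketch): in `*p++ = x' the increment of p is queued
   so that it can be emitted after the use of p, where a target with
   POST_INC addressing can fold it into the memory reference itself.  */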
9368 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9369 op0 = stabilize (op0);
9370 #endif
9372 icode = (int) this_optab->handlers[(int) mode].insn_code;
9373 if (icode != (int) CODE_FOR_nothing
9374 /* Make sure that OP0 is valid for operands 0 and 1
9375 of the insn we want to queue. */
9376 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9377 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9379 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9380 op1 = force_reg (mode, op1);
9382 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9384 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9386 rtx addr = (general_operand (XEXP (op0, 0), mode)
9387 ? force_reg (Pmode, XEXP (op0, 0))
9388 : copy_to_reg (XEXP (op0, 0)));
9391 op0 = replace_equiv_address (op0, addr);
9392 temp = force_reg (GET_MODE (op0), op0);
9393 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9394 op1 = force_reg (mode, op1);
9396 /* The increment queue is LIFO, thus we have to `queue'
9397 the instructions in reverse order. */
9398 enqueue_insn (op0, gen_move_insn (op0, temp));
9399 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9404 /* Preincrement, or we can't increment with one simple insn. */
9406 /* Save a copy of the value before inc or dec, to return it later. */
9407 temp = value = copy_to_reg (op0);
9409 /* Arrange to return the incremented value. */
9410 /* Copy the rtx because expand_binop will protect from the queue,
9411 and the results of that would be invalid for us to return
9412 if our caller does emit_queue before using our result. */
9413 temp = copy_rtx (value = op0);
9415 /* Increment however we can. */
9416 op1 = expand_binop (mode, this_optab, value, op1,
9417 current_function_check_memory_usage ? NULL_RTX : op0,
9418 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9419 /* Make sure the value is stored into OP0. */
9420 if (op1 != op0)
9421 emit_move_insn (op0, op1);
9426 /* At the start of a function, record that we have no previously-pushed
9427 arguments waiting to be popped. */
9430 init_pending_stack_adjust ()
9432 pending_stack_adjust = 0;
9435 /* When exiting from a function, if safe, clear out any pending stack adjust
9436 so the adjustment won't get done.
9438 Note, if the current function calls alloca, then it must have a
9439 frame pointer regardless of the value of flag_omit_frame_pointer. */
9442 clear_pending_stack_adjust ()
9444 #ifdef EXIT_IGNORE_STACK
9446 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9447 && EXIT_IGNORE_STACK
9448 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9449 && ! flag_inline_functions)
9451 stack_pointer_delta -= pending_stack_adjust,
9452 pending_stack_adjust = 0;
9454 #endif
9457 /* Pop any previously-pushed arguments that have not been popped yet. */
9460 do_pending_stack_adjust ()
9462 if (inhibit_defer_pop == 0)
9464 if (pending_stack_adjust != 0)
9465 adjust_stack (GEN_INT (pending_stack_adjust));
9466 pending_stack_adjust = 0;
9470 /* Expand conditional expressions. */
9472 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9473 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here.  */
9477 jumpifnot (exp, label)
9481 do_jump (exp, label, NULL_RTX);
9484 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9491 do_jump (exp, NULL_RTX, label);
9494 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9495 the result is zero, or IF_TRUE_LABEL if the result is one.
9496 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9497 meaning fall through in that case.
9499 do_jump always does any pending stack adjust except when it does not
9500 actually perform a jump. An example where there is no jump
9501 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9503 This function is responsible for optimizing cases such as
9504 &&, || and comparison operators in EXP. */
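/* As an illustration: for `if (a && b)' the TRUTH_ANDIF_EXPR case
   below emits, in effect,

	if (! a) goto false_label;
	if (! b) goto false_label;

   so the 0/1 value of `a && b' is never materialized in a register.  */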
9507 do_jump (exp, if_false_label, if_true_label)
9509 rtx if_false_label, if_true_label;
9511 enum tree_code code = TREE_CODE (exp);
9512 /* Some cases need to create a label to jump to
9513 in order to properly fall through.
9514 These cases set DROP_THROUGH_LABEL nonzero. */
9515 rtx drop_through_label = 0;
9519 enum machine_mode mode;
9521 #ifdef MAX_INTEGER_COMPUTATION_MODE
9522 check_max_integer_computation_mode (exp);
9523 #endif
9533 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9539 /* This is not true with #pragma weak */
9541 /* The address of something can never be zero. */
9543 emit_jump (if_true_label);
9548 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9549 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9550 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9551 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9554 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
9556 if ((TYPE_PRECISION (TREE_TYPE (exp))
9557 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9559 case NON_LVALUE_EXPR:
9560 case REFERENCE_EXPR:
9565 /* These cannot change zero->non-zero or vice versa. */
9566 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9569 case WITH_RECORD_EXPR:
9570 /* Put the object on the placeholder list, recurse through our first
9571 operand, and pop the list. */
9572 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9574 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9575 placeholder_list = TREE_CHAIN (placeholder_list);
9579 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9580 a test and can be longer if the test is eliminated. */
9582 /* Reduce to minus. */
9583 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9584 TREE_OPERAND (exp, 0),
9585 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9586 TREE_OPERAND (exp, 1))));
9587 /* Process as MINUS. */
9591 /* Non-zero iff operands of minus differ. */
9592 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9593 TREE_OPERAND (exp, 0),
9594 TREE_OPERAND (exp, 1)),
9595 NE, NE, if_false_label, if_true_label);
9599 /* If we are AND'ing with a small constant, do this comparison in the
9600 smallest type that fits. If the machine doesn't have comparisons
9601 that small, it will be converted back to the wider comparison.
9602 This helps if we are testing the sign bit of a narrower object.
9603 combine can't do this for us because it can't know whether a
9604 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
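/* Concretely (a sketch): a test of `x & 0x80' where x is a full int
   can be done as a QImode comparison, since only the low eight bits
   matter; on machines with byte compares this turns the test into a
   sign-bit check of the narrower object.  */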
9606 if (! SLOW_BYTE_ACCESS
9607 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9608 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9609 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9610 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9611 && (type = type_for_mode (mode, 1)) != 0
9612 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9613 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9614 != CODE_FOR_nothing))
9616 do_jump (convert (type, exp), if_false_label, if_true_label);
9621 case TRUTH_NOT_EXPR:
9622 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9625 case TRUTH_ANDIF_EXPR:
9626 if (if_false_label == 0)
9627 if_false_label = drop_through_label = gen_label_rtx ();
9628 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9629 start_cleanup_deferral ();
9630 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9631 end_cleanup_deferral ();
9634 case TRUTH_ORIF_EXPR:
9635 if (if_true_label == 0)
9636 if_true_label = drop_through_label = gen_label_rtx ();
9637 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9638 start_cleanup_deferral ();
9639 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9640 end_cleanup_deferral ();
9645 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9646 preserve_temp_slots (NULL_RTX);
9650 do_pending_stack_adjust ();
9651 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9657 case ARRAY_RANGE_REF:
9659 HOST_WIDE_INT bitsize, bitpos;
9661 enum machine_mode mode;
9665 unsigned int alignment;
9667 /* Get description of this reference. We don't actually care
9668 about the underlying object here. */
9669 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9670 &unsignedp, &volatilep, &alignment);
9672 type = type_for_size (bitsize, unsignedp);
9673 if (! SLOW_BYTE_ACCESS
9674 && type != 0 && bitsize >= 0
9675 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9676 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9677 != CODE_FOR_nothing))
9679 do_jump (convert (type, exp), if_false_label, if_true_label);
9686 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9687 if (integer_onep (TREE_OPERAND (exp, 1))
9688 && integer_zerop (TREE_OPERAND (exp, 2)))
9689 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9691 else if (integer_zerop (TREE_OPERAND (exp, 1))
9692 && integer_onep (TREE_OPERAND (exp, 2)))
9693 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9697 rtx label1 = gen_label_rtx ();
9698 drop_through_label = gen_label_rtx ();
9700 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9702 start_cleanup_deferral ();
9703 /* Now the THEN-expression. */
9704 do_jump (TREE_OPERAND (exp, 1),
9705 if_false_label ? if_false_label : drop_through_label,
9706 if_true_label ? if_true_label : drop_through_label);
9707 /* In case the do_jump just above never jumps. */
9708 do_pending_stack_adjust ();
9709 emit_label (label1);
9711 /* Now the ELSE-expression. */
9712 do_jump (TREE_OPERAND (exp, 2),
9713 if_false_label ? if_false_label : drop_through_label,
9714 if_true_label ? if_true_label : drop_through_label);
9715 end_cleanup_deferral ();
9721 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9723 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9724 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9726 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9727 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9730 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9731 fold (build (EQ_EXPR, TREE_TYPE (exp),
9732 fold (build1 (REALPART_EXPR,
9733 TREE_TYPE (inner_type),
9735 fold (build1 (REALPART_EXPR,
9736 TREE_TYPE (inner_type),
9738 fold (build (EQ_EXPR, TREE_TYPE (exp),
9739 fold (build1 (IMAGPART_EXPR,
9740 TREE_TYPE (inner_type),
9742 fold (build1 (IMAGPART_EXPR,
9743 TREE_TYPE (inner_type),
9745 if_false_label, if_true_label);
9748 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9749 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9751 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9752 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9753 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9755 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9761 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9763 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9764 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9766 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9767 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9770 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9771 fold (build (NE_EXPR, TREE_TYPE (exp),
9772 fold (build1 (REALPART_EXPR,
9773 TREE_TYPE (inner_type),
9775 fold (build1 (REALPART_EXPR,
9776 TREE_TYPE (inner_type),
9778 fold (build (NE_EXPR, TREE_TYPE (exp),
9779 fold (build1 (IMAGPART_EXPR,
9780 TREE_TYPE (inner_type),
9782 fold (build1 (IMAGPART_EXPR,
9783 TREE_TYPE (inner_type),
9785 if_false_label, if_true_label);
9788 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9789 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9791 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9792 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9793 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9795 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9800 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9801 if (GET_MODE_CLASS (mode) == MODE_INT
9802 && ! can_compare_p (LT, mode, ccp_jump))
9803 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9805 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9809 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9810 if (GET_MODE_CLASS (mode) == MODE_INT
9811 && ! can_compare_p (LE, mode, ccp_jump))
9812 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9814 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9818 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9819 if (GET_MODE_CLASS (mode) == MODE_INT
9820 && ! can_compare_p (GT, mode, ccp_jump))
9821 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9823 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9827 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9828 if (GET_MODE_CLASS (mode) == MODE_INT
9829 && ! can_compare_p (GE, mode, ccp_jump))
9830 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9832 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9835 case UNORDERED_EXPR:
9838 enum rtx_code cmp, rcmp;
9841 if (code == UNORDERED_EXPR)
9842 cmp = UNORDERED, rcmp = ORDERED;
9844 cmp = ORDERED, rcmp = UNORDERED;
9845 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9848 if (! can_compare_p (cmp, mode, ccp_jump)
9849 && (can_compare_p (rcmp, mode, ccp_jump)
9850 /* If the target doesn't provide either UNORDERED or ORDERED
9851 comparisons, canonicalize on UNORDERED for the library. */
9852 || rcmp == UNORDERED))
9856 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9858 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9863 enum rtx_code rcode1;
9864 enum tree_code tcode2;
9888 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9889 if (can_compare_p (rcode1, mode, ccp_jump))
9890 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9894 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9895 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9898 /* If the target doesn't support combined unordered
9899 compares, decompose into UNORDERED + comparison. */
9900 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9901 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9902 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9903 do_jump (exp, if_false_label, if_true_label);
9909 __builtin_expect (<test>, 0) and
9910 __builtin_expect (<test>, 1)
9912 We need to do this here, so that <test> is not converted to a SCC
9913 operation on machines that use condition code registers and COMPARE
9914 like the PowerPC, and then the jump is done based on whether the SCC
9915 operation produced a 1 or 0. */
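/* Typical source form, for reference (hypothetical condition):

	if (__builtin_expect (ptr != 0, 1))
	  ...

   Handling it here lets expand_builtin_expect_jump emit the branch
   itself, carrying the expected direction with it.  */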
9917 /* Check for a built-in function. */
9918 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9920 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9921 tree arglist = TREE_OPERAND (exp, 1);
9923 if (TREE_CODE (fndecl) == FUNCTION_DECL
9924 && DECL_BUILT_IN (fndecl)
9925 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9926 && arglist != NULL_TREE
9927 && TREE_CHAIN (arglist) != NULL_TREE)
9929 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9932 if (seq != NULL_RTX)
9939 /* fall through and generate the normal code. */
9943 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9945 /* This is not needed any more and causes poor code since it causes
9946 comparisons and tests from non-SI objects to have different code sequences.  */
9948 /* Copy to register to avoid generating bad insns by cse
9949 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9950 if (!cse_not_expected && GET_CODE (temp) == MEM)
9951 temp = copy_to_reg (temp);
9953 do_pending_stack_adjust ();
9954 /* Do any postincrements in the expression that was tested. */
9957 if (GET_CODE (temp) == CONST_INT
9958 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9959 || GET_CODE (temp) == LABEL_REF)
9961 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9965 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9966 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9967 /* Note swapping the labels gives us not-equal. */
9968 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9969 else if (GET_MODE (temp) != VOIDmode)
9970 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9971 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9972 GET_MODE (temp), NULL_RTX, 0,
9973 if_false_label, if_true_label);
9978 if (drop_through_label)
9980 /* If do_jump produces code that might be jumped around,
9981 do any stack adjusts from that code, before the place
9982 where control merges in. */
9983 do_pending_stack_adjust ();
9984 emit_label (drop_through_label);
9988 /* Given a comparison expression EXP for values too wide to be compared
9989 with one insn, test the comparison and jump to the appropriate label.
9990 The code of EXP is ignored; we always test GT if SWAP is 0,
9991 and LT if SWAP is 1. */
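/* Example (a sketch assuming 32-bit words): a signed DImode `a > b'
   becomes

	if (a.high > b.high) goto true_label;	   (signed compare)
	if (a.high != b.high) goto false_label;
	if (a.low > b.low) goto true_label;	   (unsigned compare)
	goto false_label;

   only the highest-order word is compared signed.  */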
9994 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9997 rtx if_false_label, if_true_label;
9999 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10000 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10001 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10002 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10004 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10007 /* Compare OP0 with OP1, word at a time, in mode MODE.
10008 UNSIGNEDP says to do unsigned comparison.
10009 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10012 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10013 enum machine_mode mode;
10016 rtx if_false_label, if_true_label;
10018 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10019 rtx drop_through_label = 0;
10022 if (! if_true_label || ! if_false_label)
10023 drop_through_label = gen_label_rtx ();
10024 if (! if_true_label)
10025 if_true_label = drop_through_label;
10026 if (! if_false_label)
10027 if_false_label = drop_through_label;
10029 /* Compare a word at a time, high order first. */
10030 for (i = 0; i < nwords; i++)
10032 rtx op0_word, op1_word;
10034 if (WORDS_BIG_ENDIAN)
10036 op0_word = operand_subword_force (op0, i, mode);
10037 op1_word = operand_subword_force (op1, i, mode);
10041 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10042 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10045 /* All but high-order word must be compared as unsigned. */
10046 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10047 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10048 NULL_RTX, if_true_label);
10050 /* Consider lower words only if these are equal. */
10051 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10052 NULL_RTX, 0, NULL_RTX, if_false_label);
10055 if (if_false_label)
10056 emit_jump (if_false_label);
10057 if (drop_through_label)
10058 emit_label (drop_through_label);
10061 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10062 with one insn, test the comparison and jump to the appropriate label. */
10065 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10067 rtx if_false_label, if_true_label;
10069 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10070 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10071 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10072 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10074 rtx drop_through_label = 0;
10076 if (! if_false_label)
10077 drop_through_label = if_false_label = gen_label_rtx ();
10079 for (i = 0; i < nwords; i++)
10080 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10081 operand_subword_force (op1, i, mode),
10082 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10083 word_mode, NULL_RTX, 0, if_false_label,
10087 emit_jump (if_true_label);
10088 if (drop_through_label)
10089 emit_label (drop_through_label);
10092 /* Jump according to whether OP0 is 0.
10093 We assume that OP0 has an integer mode that is too wide
10094 for the available compare insns. */
10097 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10099 rtx if_false_label, if_true_label;
10101 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10104 rtx drop_through_label = 0;
10106 /* The fastest way of doing this comparison on almost any machine is to
10107 "or" all the words and compare the result. If all have to be loaded
10108 from memory and this is a very wide item, it's possible this may
10109 be slower, but that's highly unlikely. */
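/* E.g. (a sketch, 32-bit words): to test a DImode X against zero we
   compute `x.low | x.high' into one word_mode register and compare
   that single result with zero, instead of branching once per word.  */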
10111 part = gen_reg_rtx (word_mode);
10112 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10113 for (i = 1; i < nwords && part != 0; i++)
10114 part = expand_binop (word_mode, ior_optab, part,
10115 operand_subword_force (op0, i, GET_MODE (op0)),
10116 part, 1, OPTAB_WIDEN);
10120 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10121 NULL_RTX, 0, if_false_label, if_true_label);
10126 /* If we couldn't do the "or" simply, do this with a series of compares. */
10127 if (! if_false_label)
10128 drop_through_label = if_false_label = gen_label_rtx ();
10130 for (i = 0; i < nwords; i++)
10131 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10132 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10133 if_false_label, NULL_RTX);
10136 emit_jump (if_true_label);
10138 if (drop_through_label)
10139 emit_label (drop_through_label);
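
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the "or" strategy above, modeled on host integers with hypothetical
   names.  OR-ing every word into an accumulator and testing the
   accumulator once trades one branch per word for one IOR per word plus
   a single compare.  */
#if 0
static int
wide_is_zero_sketch (words, nwords)
     const unsigned long *words;
     int nwords;
{
  unsigned long acc = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= words[i];            /* One IOR per word...  */
  return acc == 0;              /* ...and a single compare with zero.  */
}
#endif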
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
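
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the canonicalization at the top of compare_from_rtx in miniature.
   Swapping the operands of a comparison requires swapping the condition
   code too: (2 < x) must become (x > 2), not (x < 2).  The enum and
   names are hypothetical, for exposition only.  */
#if 0
enum cmp_sketch { LT_S, GT_S, LE_S, GE_S, EQ_S, NE_S };

static enum cmp_sketch
swap_condition_sketch (code)
     enum cmp_sketch code;
{
  switch (code)
    {
    case LT_S: return GT_S;
    case GT_S: return LT_S;
    case LE_S: return GE_S;
    case GE_S: return LE_S;
    default:   return code;     /* EQ and NE are symmetric.  */
    }
}
#endif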
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
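
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   why the reversal at the top of do_compare_rtx_and_jump is guarded by
   ! FLOAT_MODE_P.  For integers, "jump to L if !(a < b)" may be rewritten
   as "jump to L if a >= b"; with NaNs, (a < b) and (a >= b) can both be
   false, so the rewrite is wrong for floating modes.  Hypothetical name.  */
#if 0
static int
reverse_is_unsafe_for_nan_sketch (a, b)
     double a, b;
{
  /* When either operand is a NaN, both comparisons below are false, so
     jumping on (a >= b) is not the same as not jumping on (a < b).
     Returns nonzero exactly in that case.  */
  return !(a < b) != (a >= b);
}
#endif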
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
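
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   why do_compare_and_jump must pick between SIGNED_CODE and UNSIGNED_CODE
   before emitting anything.  The same bit pattern orders differently under
   the two interpretations, so LT vs LTU (etc.) is chosen from
   TREE_UNSIGNED (type).  Hypothetical name and values.  */
#if 0
static int
signedness_matters_sketch ()
{
  int si = -1;
  unsigned int ui = (unsigned int) si;  /* Same bits: all ones.  */

  /* Signed view: -1 < 0 holds.  Unsigned view: 0xffff... < 0 never
     holds.  The expression below therefore evaluates to 1.  */
  return (si < 0) != (ui < 0U);
}
#endif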
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
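
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the single-bit shortcut above, written as source-level rewrites on host
   integers.  N plays the role of the bit position found by tree_log2;
   names and values are hypothetical.  */
#if 0
static unsigned int
bit_test_sketch (x, n)
     unsigned int x;
     int n;
{
  /* (x & (1 << n)) != 0 becomes (x >> n) & 1: a shift and a mask,
     with no store-flag (scc) instruction needed.  */
  unsigned int ne_form = (x >> n) & 1;

  /* (x & (1 << n)) == 0 additionally XORs the low bit with 1.  */
  unsigned int eq_form = ((x >> n) & 1) ^ 1;

  return ne_form + eq_form;     /* Always 1, for any x and n.  */
}
#endif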
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
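
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the range check used for switch dispatch, on host integers.  Subtracting
   the low bound first lets a single unsigned comparison test both bounds
   at once, which is why the comparison above and in do_tablejump is LTU or
   GTU.  Hypothetical name.  */
#if 0
static int
switch_in_range_sketch (x, minval, range)
     long x, minval;
     unsigned long range;
{
  /* Equivalent to x >= minval && x <= minval + range, as one unsigned
     test: values below minval wrap around to huge unsigned values.  */
  return (unsigned long) (x - minval) <= range;
}
#endif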
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
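
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the address arithmetic built above as a PLUS of a MULT and a LABEL_REF,
   in plain C.  entry_size plays the role of
   GET_MODE_SIZE (CASE_VECTOR_MODE); names are hypothetical.  */
#if 0
static const char *
table_entry_address_sketch (table_base, index, entry_size)
     const char *table_base;
     unsigned long index, entry_size;
{
  /* table_label + index * entry_size: the table is an array of
     fixed-size entries starting at the label.  */
  return table_base + index * entry_size;
}
#endif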
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
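
/* Illustrative sketch, not part of the compiler and kept under `#if 0':
   the complete dispatch sequence that try_tablejump and do_tablejump emit,
   modeled with a host array of code "addresses".  Names and values are
   hypothetical.  */
#if 0
typedef void (*case_fn_sketch) PARAMS ((void));

static void
tablejump_sketch (x, minval, range, table, default_fn)
     long x, minval;
     unsigned long range;
     case_fn_sketch *table;
     case_fn_sketch default_fn;
{
  unsigned long index = (unsigned long) (x - minval);

  if (index > range)            /* The single GTU bounds check.  */
    default_fn ();
  else
    table[index] ();            /* Load the table entry, jump through it.  */
}
#endif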