/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn	PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)

/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
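
/* Illustrative sketch, not part of GCC: with the default MOVE_RATIO
   above (15 when not optimizing for size), a 16-byte copy on a 32-bit
   target costs four SImode moves, so

     MOVE_BY_PIECES_P (16, 32)   -- 4 < 15, true: expand inline

   while a 256-byte copy would cost 64 moves and instead goes through
   a movstr pattern or a memcpy/bcopy call.  */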
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  int regno;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

/* Mark the per-function expression state for garbage collection.  */

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
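
/* Illustrative sketch, not part of GCC: the queue protocol ties the
   three routines above together.  For a hypothetical pseudo VAR being
   post-incremented:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     rtx safe = protect_from_queue (q, 0);   -- the pre-increment value
     ... use SAFE in the insns being emitted ...
     emit_queue ();                          -- now the add is emitted

   The names VAR, Q and SAFE are hypothetical.  */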
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;
      rtx libcall = (rtx) 0;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      /* Pick the library routine for this (FROM_MODE, TO_MODE) pair;
	 the names encode the conversion, e.g. extendsfdf2 is SFmode
	 to DFmode.  */
      if (from_mode == SFmode && to_mode == DFmode)
	libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
	libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
	libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
	libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
	libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
	libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
	libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
	libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
	libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
	libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
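
  /* Illustrative sketch, not part of GCC: on a target with no
     extendqisi2 insn and no usable intermediate mode, sign-extending a
     QImode value into SImode by the shift method above amounts to

       tmp = (x << 24) >> 24;   -- 32-bit SImode

     i.e. shift_amount is GET_MODE_BITSIZE (SImode)
     - GET_MODE_BITSIZE (QImode) = 24, and the right shift is
     arithmetic for signed values, logical for unsigned ones.  */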
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
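
/* Illustrative sketch, not part of GCC: a typical call, zero-extending
   a QImode pseudo into an SImode pseudo.  The names BYTE and WORD are
   hypothetical:

     rtx byte = gen_reg_rtx (QImode);
     rtx word = gen_reg_rtx (SImode);
     convert_move (word, byte, 1);   -- 1: treat BYTE as unsigned

   convert_move will use a zero_extendqisi2 insn when the target has
   one, and otherwise fall back to the intermediate-mode or shift
   paths above.  */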
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
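
/* Illustrative sketch, not part of GCC: narrowing a constant never
   emits an insn; the CONST_INT branch above resolves it at expansion
   time:

     rtx x = convert_modes (QImode, SImode, GEN_INT (300), 1);
     -- 300 = 0x12c; only the low byte survives the narrowing

   A REG or MEM source would instead go through gen_lowpart or a fresh
   pseudo plus convert_move.  */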
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
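
/* Illustrative sketch, not part of GCC: on a 32-bit target with
   MOVE_MAX == 4 and word-aligned operands, a 7-byte move costs

     7 / 4 = 1 SImode insn, remainder 3
     3 / 2 = 1 HImode insn, remainder 1
     1 / 1 = 1 QImode insn

   so move_by_pieces_ninsns (7, 32) == 3, comfortably below the
   default MOVE_RATIO.  */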
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
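
/* Illustrative sketch, not part of GCC: a typical use of
   emit_block_move to copy a 64-byte BLKmode temporary.  SRC is a
   hypothetical MEM:

     rtx dst = assign_stack_temp (BLKmode, 64, 0);
     emit_block_move (dst, src, GEN_INT (64));

   Depending on size, alignment and the target, this expands to
   move_by_pieces, a movstr pattern, or a memcpy/bcopy call.  */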
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
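
/* Illustrative sketch, not part of GCC: spilling a two-word aggregate
   that arrived in hard registers 4 and 5 into a hypothetical BLKmode
   stack slot:

     move_block_from_reg (4, stack_slot, 2, 8);   -- 2 regs, 8 bytes

   The SIZE argument lets the big-endian path above left-justify
   aggregates smaller than a word.  */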
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)
	    {
	      rtx mem;

	      mem = assign_stack_temp (GET_MODE (src),
				       GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = change_address (mem, mode, XEXP (mem, 0));
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
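
/* Illustrative sketch, not part of GCC: a PARALLEL describing a value
   split between two registers, as a target's FUNCTION_ARG might build
   it.  Byte offsets are the second operand of each EXPR_LIST:

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DI 5) (const_int 8))])

   emit_group_load with this DST pulls bytes 0-7 and 8-15 out of SRC
   into the two registers.  */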
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
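
/* Illustrative sketch, not part of GCC: a 6-byte struct returned in a
   32-bit register pair on a big-endian target has
   bytes % UNITS_PER_WORD == 2, so

     big_endian_correction = 32 - 2 * 8 = 16

   and the first extraction above starts 16 bits into the register,
   skipping the unused high-order bytes.  */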
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2291 can_store_by_pieces (len, constfun, constfundata, align)
2292 unsigned HOST_WIDE_INT len;
2293 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2297 unsigned HOST_WIDE_INT max_size, l;
2298 HOST_WIDE_INT offset = 0;
2299 enum machine_mode mode, tmode;
2300 enum insn_code icode;
2304 if (! MOVE_BY_PIECES_P (len, align))
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2308 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2309 align = MOVE_MAX * BITS_PER_UNIT;
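/* For instance, with MOVE_MAX == 8 on a target where unaligned
   accesses are cheap, ALIGN is raised to 64 bits here so that every
   integer mode up to 8 bytes wide can qualify in the loop below.  */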
2311 /* We would first store what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2315 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2320 max_size = MOVE_MAX_PIECES + 1;
2321 while (max_size > 1)
2323 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2324 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2325 if (GET_MODE_SIZE (tmode) < max_size)
2328 if (mode == VOIDmode)
2331 icode = mov_optab->handlers[(int) mode].insn_code;
2332 if (icode != CODE_FOR_nothing
2333 && align >= GET_MODE_ALIGNMENT (mode))
2335 unsigned int size = GET_MODE_SIZE (mode);
2342 cst = (*constfun) (constfundata, offset, mode);
2343 if (!LEGITIMATE_CONSTANT_P (cst))
2353 max_size = GET_MODE_SIZE (mode);
2356 /* The code above should have handled everything. */
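/* Decomposition example (hypothetical 32-bit target, MOVE_MAX_PIECES
   == 4): a word-aligned LEN of 7 is checked as one 4-byte piece at
   offset 0, one 2-byte piece at offset 4 and one 1-byte piece at
   offset 6; CONSTFUN must yield a LEGITIMATE_CONSTANT_P value at each
   of these offsets, in both directions of the outer reverse loop.  */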
2364 /* Generate several move instructions to store LEN bytes generated by
2365 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2366 pointer which will be passed as argument in every CONSTFUN call.
2367 ALIGN is maximum alignment we can assume. */
2370 store_by_pieces (to, len, constfun, constfundata, align)
2372 unsigned HOST_WIDE_INT len;
2373 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2377 struct store_by_pieces data;
2379 if (! MOVE_BY_PIECES_P (len, align))
2381 to = protect_from_queue (to, 1);
2382 data.constfun = constfun;
2383 data.constfundata = constfundata;
2386 store_by_pieces_1 (&data, align);
2389 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2390 rtx with BLKmode). The caller must pass TO through protect_from_queue
2391 before calling. ALIGN is maximum alignment we can assume. */
2394 clear_by_pieces (to, len, align)
2396 unsigned HOST_WIDE_INT len;
2399 struct store_by_pieces data;
2401 data.constfun = clear_by_pieces_1;
2402 data.constfundata = NULL;
2405 store_by_pieces_1 (&data, align);
2408 /* Callback routine for clear_by_pieces.
2409 Return const0_rtx unconditionally. */
2412 clear_by_pieces_1 (data, offset, mode)
2413 PTR data ATTRIBUTE_UNUSED;
2414 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2415 enum machine_mode mode ATTRIBUTE_UNUSED;
2420 /* Subroutine of clear_by_pieces and store_by_pieces.
2421 Generate several move instructions to store LEN bytes of block TO. (A MEM
2422 rtx with BLKmode). The caller must pass TO through protect_from_queue
2423 before calling. ALIGN is maximum alignment we can assume. */
2426 store_by_pieces_1 (data, align)
2427 struct store_by_pieces *data;
2430 rtx to_addr = XEXP (data->to, 0);
2431 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2432 enum machine_mode mode = VOIDmode, tmode;
2433 enum insn_code icode;
2436 data->to_addr = to_addr;
2438 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2439 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2441 data->explicit_inc_to = 0;
2443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2445 data->offset = data->len;
2447 /* If storing requires more than two move insns,
2448 copy addresses to registers (to make displacements shorter)
2449 and use post-increment if available. */
2450 if (!data->autinc_to
2451 && move_by_pieces_ninsns (data->len, align) > 2)
2453 /* Determine the main mode we'll be using. */
2454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 if (GET_MODE_SIZE (tmode) < max_size)
2459 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = -1;
2466 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2467 && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = 1;
2474 if (! data->autinc_to && CONSTANT_P (to_addr))
2475 data->to_addr = copy_addr_to_reg (to_addr);
2478 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2479 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2480 align = MOVE_MAX * BITS_PER_UNIT;
2482 /* First store what we can in the largest integer mode, then go to
2483 successively smaller modes. */
2485 while (max_size > 1)
2487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2489 if (GET_MODE_SIZE (tmode) < max_size)
2492 if (mode == VOIDmode)
2495 icode = mov_optab->handlers[(int) mode].insn_code;
2496 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2497 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2499 max_size = GET_MODE_SIZE (mode);
2502 /* The code above should have handled everything. */
2507 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2508 with move instructions for mode MODE. GENFUN is the gen_... function
2509 to make a move insn for that mode. DATA has all the other info. */
2512 store_by_pieces_2 (genfun, mode, data)
2513 rtx (*genfun) PARAMS ((rtx, ...));
2514 enum machine_mode mode;
2515 struct store_by_pieces *data;
2517 unsigned int size = GET_MODE_SIZE (mode);
2520 while (data->len >= size)
2523 data->offset -= size;
2525 if (data->autinc_to)
2527 to1 = replace_equiv_address (data->to, data->to_addr);
2528 to1 = adjust_address (to1, mode, 0);
2531 to1 = adjust_address (data->to, mode, data->offset);
2533 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2534 emit_insn (gen_add2_insn (data->to_addr,
2535 GEN_INT (-(HOST_WIDE_INT) size)));
2537 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2538 emit_insn ((*genfun) (to1, cst));
2540 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2543 if (! data->reverse)
2544 data->offset += size;
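/* Trace (hypothetical): with a 4-byte mode, data->reverse set and
   data->explicit_inc_to == -1, each pass first emits to_addr -= 4 and
   then stores the 4-byte constant at the new address, filling the
   block from its end toward its start.  */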
2550 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2551 its length in bytes. */
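/* Usage sketch (hypothetical): zeroing a 16-byte BLKmode MEM BUF:

     clear_storage (buf, GEN_INT (16));

   A small constant size like this is normally handled by
   clear_by_pieces; otherwise a clrstr pattern or a memset/bzero call
   is emitted below.  */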
2554 clear_storage (object, size)
2558 #ifdef TARGET_MEM_FUNCTIONS
2560 tree call_expr, arg_list;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (GET_CODE (size) == CONST_INT
2578 && MOVE_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2582 /* Try the most limited insn first, because there's no point
2583 including more than one in the machine description unless
2584 the more limited one has some advantage. */
2586 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2587 enum machine_mode mode;
2589 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2590 mode = GET_MODE_WIDER_MODE (mode))
2592 enum insn_code code = clrstr_optab[(int) mode];
2593 insn_operand_predicate_fn pred;
2595 if (code != CODE_FOR_nothing
2596 /* We don't need MODE to be narrower than
2597 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2598 the mode mask, as it is returned by the macro, it will
2599 definitely be less than the actual mode mask. */
2600 && ((GET_CODE (size) == CONST_INT
2601 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2602 <= (GET_MODE_MASK (mode) >> 1)))
2603 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2604 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2605 || (*pred) (object, BLKmode))
2606 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2607 || (*pred) (opalign, VOIDmode)))
2610 rtx last = get_last_insn ();
2613 op1 = convert_to_mode (mode, size, 1);
2614 pred = insn_data[(int) code].operand[1].predicate;
2615 if (pred != 0 && ! (*pred) (op1, mode))
2616 op1 = copy_to_mode_reg (mode, op1);
2618 pat = GEN_FCN ((int) code) (object, op1, opalign);
2625 delete_insns_since (last);
2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to emit_queue.
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2649 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2654 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2655 TREE_UNSIGNED (integer_type_node));
2656 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2659 #ifdef TARGET_MEM_FUNCTIONS
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context.
2663 This could be a user call to memset and the user may wish to
2664 examine the return value from memset.
2666 For targets where libcalls and normal calls have different
2667 conventions for returning pointers, we could end up generating
2670 So instead of using a libcall sequence we build up a suitable
2671 CALL_EXPR and expand the call in the normal fashion. */
2672 if (fn == NULL_TREE)
2676 /* This was copied from except.c; I don't know whether all of it is
2677 necessary in this context or not. */
2678 fn = get_identifier ("memset");
2679 fntype = build_pointer_type (void_type_node);
2680 fntype = build_function_type (fntype, NULL_TREE);
2681 fn = build_decl (FUNCTION_DECL, fn, fntype);
2682 ggc_add_tree_root (&fn, 1);
2683 DECL_EXTERNAL (fn) = 1;
2684 TREE_PUBLIC (fn) = 1;
2685 DECL_ARTIFICIAL (fn) = 1;
2686 TREE_NOTHROW (fn) = 1;
2687 make_decl_rtl (fn, NULL);
2688 assemble_external (fn);
2691 /* We need to make an argument list for the function call.
2693 memset has three arguments: the first is a void * address, the
2694 second an integer with the initialization value, and the last a
2695 size_t byte count. */
2697 = build_tree_list (NULL_TREE,
2698 make_tree (build_pointer_type (void_type_node),
2700 TREE_CHAIN (arg_list)
2701 = build_tree_list (NULL_TREE,
2702 make_tree (integer_type_node, const0_rtx));
2703 TREE_CHAIN (TREE_CHAIN (arg_list))
2704 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2705 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR,
2709 build_pointer_type (TREE_TYPE (fn)), fn);
2710 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2711 call_expr, arg_list, NULL_TREE);
2712 TREE_SIDE_EFFECTS (call_expr) = 1;
2714 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2716 emit_library_call (bzero_libfunc, LCT_NORMAL,
2717 VOIDmode, 2, object, Pmode, size,
2718 TYPE_MODE (integer_type_node));
2726 /* Generate code to copy Y into X.
2727 Both Y and X must have the same mode, except that
2728 Y can be a constant with VOIDmode.
2729 This mode cannot be BLKmode; use emit_block_move for that.
2731 Return the last instruction emitted. */
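/* Usage sketch (hypothetical): copy a constant into a fresh pseudo:

     rtx reg = gen_reg_rtx (SImode);
     rtx last = emit_move_insn (reg, GEN_INT (42));

   The constant has VOIDmode, which is allowed above; both operands
   are otherwise required to share the same mode.  */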
2734 emit_move_insn (x, y)
2737 enum machine_mode mode = GET_MODE (x);
2738 rtx y_cst = NULL_RTX;
2741 x = protect_from_queue (x, 1);
2742 y = protect_from_queue (y, 0);
2744 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2747 /* Never force constant_p_rtx to memory. */
2748 if (GET_CODE (y) == CONSTANT_P_RTX)
2750 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2753 y = force_const_mem (mode, y);
2756 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2758 if (GET_CODE (x) == MEM
2759 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2760 && ! push_operand (x, GET_MODE (x)))
2762 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2763 x = validize_mem (x);
2765 if (GET_CODE (y) == MEM
2766 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2768 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2769 y = validize_mem (y);
2771 if (mode == BLKmode)
2774 last_insn = emit_move_insn_1 (x, y);
2776 if (y_cst && GET_CODE (x) == REG)
2777 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2782 /* Low level part of emit_move_insn.
2783 Called just like emit_move_insn, but assumes X and Y
2784 are basically valid. */
2787 emit_move_insn_1 (x, y)
2790 enum machine_mode mode = GET_MODE (x);
2791 enum machine_mode submode;
2792 enum mode_class class = GET_MODE_CLASS (mode);
2795 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2798 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2800 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2802 /* Expand complex moves by moving real part and imag part, if possible. */
2803 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2804 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2806 (class == MODE_COMPLEX_INT
2807 ? MODE_INT : MODE_FLOAT),
2809 && (mov_optab->handlers[(int) submode].insn_code
2810 != CODE_FOR_nothing))
2812 /* Don't split destination if it is a stack push. */
2813 int stack = push_operand (x, GET_MODE (x));
2815 #ifdef PUSH_ROUNDING
2816 /* In case we output to the stack, but the size is smaller than what the
2817 machine can push exactly, we need to use move instructions. */
2819 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2822 int offset1, offset2;
2824 /* Do not use anti_adjust_stack, since we don't want to update
2825 stack_pointer_delta. */
2826 temp = expand_binop (Pmode,
2827 #ifdef STACK_GROWS_DOWNWARD
2834 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2838 if (temp != stack_pointer_rtx)
2839 emit_move_insn (stack_pointer_rtx, temp);
2840 #ifdef STACK_GROWS_DOWNWARD
2842 offset2 = GET_MODE_SIZE (submode);
2844 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2845 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2846 + GET_MODE_SIZE (submode));
2848 emit_move_insn (change_address (x, submode,
2849 gen_rtx_PLUS (Pmode,
2851 GEN_INT (offset1))),
2852 gen_realpart (submode, y));
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2856 GEN_INT (offset2))),
2857 gen_imagpart (submode, y));
2861 /* If this is a stack push, push the high part first, so it
2862 will end up in argument order.
2864 In that case, change_address is used only to convert
2865 the mode, not to change the address. */
2868 /* Note that the real part always precedes the imag part in memory
2869 regardless of machine's endianness. */
2870 #ifdef STACK_GROWS_DOWNWARD
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_imagpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2878 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2879 (gen_rtx_MEM (submode, XEXP (x, 0)),
2880 gen_realpart (submode, y)));
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_imagpart (submode, y)));
2888 rtx realpart_x, realpart_y;
2889 rtx imagpart_x, imagpart_y;
2891 /* If this is a complex value with each part being smaller than a
2892 word, the usual calling sequence will likely pack the pieces into
2893 a single register. Unfortunately, SUBREG of hard registers only
2894 deals in terms of words, so we have a problem converting input
2895 arguments to the CONCAT of two registers that is used elsewhere
2896 for complex values. If this is before reload, we can copy it into
2897 memory and reload. FIXME, we should see about using extract and
2898 insert on integer registers, but complex short and complex char
2899 variables should be rarely used. */
2900 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2901 && (reload_in_progress | reload_completed) == 0)
2903 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2904 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2906 if (packed_dest_p || packed_src_p)
2908 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2909 ? MODE_FLOAT : MODE_INT);
2911 enum machine_mode reg_mode
2912 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2914 if (reg_mode != BLKmode)
2916 rtx mem = assign_stack_temp (reg_mode,
2917 GET_MODE_SIZE (mode), 0);
2918 rtx cmem = adjust_address (mem, mode, 0);
2921 = N_("function using short complex types cannot be inline");
2925 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2926 emit_move_insn_1 (cmem, y);
2927 return emit_move_insn_1 (sreg, mem);
2931 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2932 emit_move_insn_1 (mem, sreg);
2933 return emit_move_insn_1 (x, cmem);
2939 realpart_x = gen_realpart (submode, x);
2940 realpart_y = gen_realpart (submode, y);
2941 imagpart_x = gen_imagpart (submode, x);
2942 imagpart_y = gen_imagpart (submode, y);
2944 /* Show the output dies here. This is necessary for SUBREGs
2945 of pseudos since we cannot track their lifetimes correctly;
2946 hard regs shouldn't appear here except as return values.
2947 We never want to emit such a clobber after reload. */
2949 && ! (reload_in_progress || reload_completed)
2950 && (GET_CODE (realpart_x) == SUBREG
2951 || GET_CODE (imagpart_x) == SUBREG))
2953 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (realpart_x, realpart_y));
2958 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2959 (imagpart_x, imagpart_y));
2962 return get_last_insn ();
2965 /* This will handle any multi-word mode that lacks a move_insn pattern.
2966 However, you will get better code if you define such patterns,
2967 even if they must turn into multiple assembler instructions. */
2968 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2974 #ifdef PUSH_ROUNDING
2976 /* If X is a push on the stack, do the push now and replace
2977 X with a reference to the stack pointer. */
2978 if (push_operand (x, GET_MODE (x)))
2983 /* Do not use anti_adjust_stack, since we don't want to update
2984 stack_pointer_delta. */
2985 temp = expand_binop (Pmode,
2986 #ifdef STACK_GROWS_DOWNWARD
2993 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2997 if (temp != stack_pointer_rtx)
2998 emit_move_insn (stack_pointer_rtx, temp);
3000 code = GET_CODE (XEXP (x, 0));
3001 /* Just hope that small offsets off SP are OK. */
3002 if (code == POST_INC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (-(HOST_WIDE_INT)
3005 GET_MODE_SIZE (GET_MODE (x))));
3006 else if (code == POST_DEC)
3007 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3008 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3010 temp = stack_pointer_rtx;
3012 x = change_address (x, VOIDmode, temp);
3016 /* If we are in reload, see if either operand is a MEM whose address
3017 is scheduled for replacement. */
3018 if (reload_in_progress && GET_CODE (x) == MEM
3019 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3020 x = replace_equiv_address_nv (x, inner);
3021 if (reload_in_progress && GET_CODE (y) == MEM
3022 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3023 y = replace_equiv_address_nv (y, inner);
3029 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3032 rtx xpart = operand_subword (x, i, 1, mode);
3033 rtx ypart = operand_subword (y, i, 1, mode);
3035 /* If we can't get a part of Y, put Y into memory if it is a
3036 constant. Otherwise, force it into a register. If we still
3037 can't get a part of Y, abort. */
3038 if (ypart == 0 && CONSTANT_P (y))
3040 y = force_const_mem (mode, y);
3041 ypart = operand_subword (y, i, 1, mode);
3043 else if (ypart == 0)
3044 ypart = operand_subword_force (y, i, mode);
3046 if (xpart == 0 || ypart == 0)
3049 need_clobber |= (GET_CODE (xpart) == SUBREG);
3051 last_insn = emit_move_insn (xpart, ypart);
3054 seq = gen_sequence ();
3057 /* Show the output dies here. This is necessary for SUBREGs
3058 of pseudos since we cannot track their lifetimes correctly;
3059 hard regs shouldn't appear here except as return values.
3060 We never want to emit such a clobber after reload. */
3062 && ! (reload_in_progress || reload_completed)
3063 && need_clobber != 0)
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3076 /* Pushing data onto the stack. */
3078 /* Push a block of length SIZE (perhaps variable)
3079 and return an rtx to address the beginning of the block.
3080 Note that it is not possible for the value returned to be a QUEUED.
3081 The value may be virtual_outgoing_args_rtx.
3083 EXTRA is the number of bytes of padding to push in addition to SIZE.
3084 BELOW nonzero means this padding comes at low addresses;
3085 otherwise, the padding comes at high addresses. */
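/* E.g. (hypothetical) push_block (GEN_INT (32), 0, 0) adjusts the
   stack by 32 bytes and returns an rtx addressing the start of the
   newly allocated block.  */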
3088 push_block (size, extra, below)
3094 size = convert_modes (Pmode, ptr_mode, size, 1);
3095 if (CONSTANT_P (size))
3096 anti_adjust_stack (plus_constant (size, extra));
3097 else if (GET_CODE (size) == REG && extra == 0)
3098 anti_adjust_stack (size);
3101 temp = copy_to_mode_reg (Pmode, size);
3103 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3104 temp, 0, OPTAB_LIB_WIDEN);
3105 anti_adjust_stack (temp);
3108 #ifndef STACK_GROWS_DOWNWARD
3114 temp = virtual_outgoing_args_rtx;
3115 if (extra != 0 && below)
3116 temp = plus_constant (temp, extra);
3120 if (GET_CODE (size) == CONST_INT)
3121 temp = plus_constant (virtual_outgoing_args_rtx,
3122 -INTVAL (size) - (below ? 0 : extra));
3123 else if (extra != 0 && !below)
3124 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3125 negate_rtx (Pmode, plus_constant (size, extra)));
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3128 negate_rtx (Pmode, size));
3131 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3135 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3136 block of SIZE bytes. */
3139 get_push_address (size)
3144 if (STACK_PUSH_CODE == POST_DEC)
3145 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3146 else if (STACK_PUSH_CODE == POST_INC)
3147 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3149 temp = stack_pointer_rtx;
3151 return copy_to_reg (temp);
3154 #ifdef PUSH_ROUNDING
3156 /* Emit single push insn. */
3159 emit_single_push_insn (mode, x, type)
3161 enum machine_mode mode;
3165 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3167 enum insn_code icode;
3168 insn_operand_predicate_fn pred;
3170 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3171 /* If there is a push pattern, use it. Otherwise, try the old way of
3172 handing a MEM representing the push operation to the move expander. */
3173 icode = push_optab->handlers[(int) mode].insn_code;
3174 if (icode != CODE_FOR_nothing)
3176 if (((pred = insn_data[(int) icode].operand[0].predicate)
3177 && !((*pred) (x, mode))))
3178 x = force_reg (mode, x);
3179 emit_insn (GEN_FCN (icode) (x));
3182 if (GET_MODE_SIZE (mode) == rounded_size)
3183 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3186 #ifdef STACK_GROWS_DOWNWARD
3187 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3188 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3190 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3191 GEN_INT (rounded_size));
3193 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3196 dest = gen_rtx_MEM (mode, dest_addr);
3200 set_mem_attributes (dest, type, 1);
3201 /* Function incoming arguments may overlap with sibling call
3202 outgoing arguments and we cannot allow reordering of reads
3203 from function arguments with stores to outgoing arguments
3204 of sibling calls. */
3205 set_mem_alias_set (dest, 0);
3207 emit_move_insn (dest, x);
3211 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3213 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3215 SIZE is an rtx for the size of data to be copied (in bytes),
3216 needed only if X is BLKmode.
3218 ALIGN (in bits) is maximum alignment we can assume.
3220 If PARTIAL and REG are both nonzero, then copy that many of the first
3221 words of X into registers starting with REG, and push the rest of X.
3222 The amount of space pushed is decreased by PARTIAL words,
3223 rounded *down* to a multiple of PARM_BOUNDARY.
3224 REG must be a hard register in this case.
3225 If REG is zero but PARTIAL is not, take all other actions for an
3226 argument partially in registers, but do not actually load any registers.
3229 EXTRA is the amount in bytes of extra space to leave next to this arg.
3230 This is ignored if an argument block has already been allocated.
3232 On a machine that lacks real push insns, ARGS_ADDR is the address of
3233 the bottom of the argument block for this call. We use indexing off there
3234 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3235 argument block has not been preallocated.
3237 ARGS_SO_FAR is the size of args previously pushed for this call.
3239 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3240 for arguments passed in registers. If nonzero, it will be the number
3241 of bytes required. */
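/* Worked example (hypothetical, UNITS_PER_WORD == 4, PARM_BOUNDARY
   == 32): a 12-byte BLKmode argument with PARTIAL == 1 and REG set
   has its first word copied into REG, pushes only the remaining 8
   bytes, and the space pushed is decreased by the 4 bytes the
   register part accounts for.  */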
3244 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3245 args_addr, args_so_far, reg_parm_stack_space,
3248 enum machine_mode mode;
3257 int reg_parm_stack_space;
3261 enum direction stack_direction
3262 #ifdef STACK_GROWS_DOWNWARD
3268 /* Decide where to pad the argument: `downward' for below,
3269 `upward' for above, or `none' for don't pad it.
3270 Default is below for small data on big-endian machines; else above. */
3271 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3273 /* Invert direction if stack is post-decrement. */
3275 if (STACK_PUSH_CODE == POST_DEC)
3276 if (where_pad != none)
3277 where_pad = (where_pad == downward ? upward : downward);
3279 xinner = x = protect_from_queue (x, 0);
3281 if (mode == BLKmode)
3283 /* Copy a block into the stack, entirely or partially. */
3286 int used = partial * UNITS_PER_WORD;
3287 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3295 /* USED is now the # of bytes we need not copy to the stack
3296 because registers will take care of them. */
3299 xinner = adjust_address (xinner, BLKmode, used);
3301 /* If the partial register-part of the arg counts in its stack size,
3302 skip the part of stack space corresponding to the registers.
3303 Otherwise, start copying to the beginning of the stack space,
3304 by setting SKIP to 0. */
3305 skip = (reg_parm_stack_space == 0) ? 0 : used;
3307 #ifdef PUSH_ROUNDING
3308 /* Do it with several push insns if that doesn't take lots of insns
3309 and if there is no difficulty with push insns that skip bytes
3310 on the stack for alignment purposes. */
3313 && GET_CODE (size) == CONST_INT
3315 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3316 /* Here we avoid the case of a structure whose weak alignment
3317 forces many pushes of a small amount of data,
3318 and such small pushes do rounding that causes trouble. */
3319 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3320 || align >= BIGGEST_ALIGNMENT
3321 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3322 == (align / BITS_PER_UNIT)))
3323 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3325 /* Push padding now if padding above and stack grows down,
3326 or if padding below and stack grows up.
3327 But if space already allocated, this has already been done. */
3328 if (extra && args_addr == 0
3329 && where_pad != none && where_pad != stack_direction)
3330 anti_adjust_stack (GEN_INT (extra));
3332 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3334 if (current_function_check_memory_usage && ! in_check_memory_usage)
3338 in_check_memory_usage = 1;
3339 temp = get_push_address (INTVAL (size) - used);
3340 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3341 emit_library_call (chkr_copy_bitmap_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3343 Pmode, XEXP (xinner, 0), Pmode,
3344 GEN_INT (INTVAL (size) - used),
3345 TYPE_MODE (sizetype));
3347 emit_library_call (chkr_set_right_libfunc,
3348 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3349 Pmode, GEN_INT (INTVAL (size) - used),
3350 TYPE_MODE (sizetype),
3351 GEN_INT (MEMORY_USE_RW),
3352 TYPE_MODE (integer_type_node));
3353 in_check_memory_usage = 0;
3357 #endif /* PUSH_ROUNDING */
3361 /* Otherwise make space on the stack and copy the data
3362 to the address of that space. */
3364 /* Deduct words put into registers from the size we must copy. */
3367 if (GET_CODE (size) == CONST_INT)
3368 size = GEN_INT (INTVAL (size) - used);
3370 size = expand_binop (GET_MODE (size), sub_optab, size,
3371 GEN_INT (used), NULL_RTX, 0,
3375 /* Get the address of the stack space.
3376 In this case, we do not deal with EXTRA separately.
3377 A single stack adjust will do. */
3380 temp = push_block (size, extra, where_pad == downward);
3383 else if (GET_CODE (args_so_far) == CONST_INT)
3384 temp = memory_address (BLKmode,
3385 plus_constant (args_addr,
3386 skip + INTVAL (args_so_far)));
3388 temp = memory_address (BLKmode,
3389 plus_constant (gen_rtx_PLUS (Pmode,
3393 if (current_function_check_memory_usage && ! in_check_memory_usage)
3395 in_check_memory_usage = 1;
3396 target = copy_to_reg (temp);
3397 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3398 emit_library_call (chkr_copy_bitmap_libfunc,
3399 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3401 XEXP (xinner, 0), Pmode,
3402 size, TYPE_MODE (sizetype));
3404 emit_library_call (chkr_set_right_libfunc,
3405 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3407 size, TYPE_MODE (sizetype),
3408 GEN_INT (MEMORY_USE_RW),
3409 TYPE_MODE (integer_type_node));
3410 in_check_memory_usage = 0;
3413 target = gen_rtx_MEM (BLKmode, temp);
3417 set_mem_attributes (target, type, 1);
3418 /* Function incoming arguments may overlap with sibling call
3419 outgoing arguments and we cannot allow reordering of reads
3420 from function arguments with stores to outgoing arguments
3421 of sibling calls. */
3422 set_mem_alias_set (target, 0);
3425 set_mem_align (target, align);
3427 /* TEMP is the address of the block. Copy the data there. */
3428 if (GET_CODE (size) == CONST_INT
3429 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3431 move_by_pieces (target, xinner, INTVAL (size), align);
3436 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3437 enum machine_mode mode;
3439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3441 mode = GET_MODE_WIDER_MODE (mode))
3443 enum insn_code code = movstr_optab[(int) mode];
3444 insn_operand_predicate_fn pred;
3446 if (code != CODE_FOR_nothing
3447 && ((GET_CODE (size) == CONST_INT
3448 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3449 <= (GET_MODE_MASK (mode) >> 1)))
3450 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3451 && (!(pred = insn_data[(int) code].operand[0].predicate)
3452 || ((*pred) (target, BLKmode)))
3453 && (!(pred = insn_data[(int) code].operand[1].predicate)
3454 || ((*pred) (xinner, BLKmode)))
3455 && (!(pred = insn_data[(int) code].operand[3].predicate)
3456 || ((*pred) (opalign, VOIDmode))))
3458 rtx op2 = convert_to_mode (mode, size, 1);
3459 rtx last = get_last_insn ();
3462 pred = insn_data[(int) code].operand[2].predicate;
3463 if (pred != 0 && ! (*pred) (op2, mode))
3464 op2 = copy_to_mode_reg (mode, op2);
3466 pat = GEN_FCN ((int) code) (target, xinner,
3474 delete_insns_since (last);
3479 if (!ACCUMULATE_OUTGOING_ARGS)
3481 /* If the source is referenced relative to the stack pointer,
3482 copy it to another register to stabilize it. We do not need
3483 to do this if we know that we won't be changing sp. */
3485 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3486 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3487 temp = copy_to_reg (temp);
3490 /* Make inhibit_defer_pop nonzero around the library call
3491 to force it to pop the bcopy-arguments right away. */
3493 #ifdef TARGET_MEM_FUNCTIONS
3494 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3495 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3496 convert_to_mode (TYPE_MODE (sizetype),
3497 size, TREE_UNSIGNED (sizetype)),
3498 TYPE_MODE (sizetype));
3500 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3501 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3502 convert_to_mode (TYPE_MODE (integer_type_node),
3504 TREE_UNSIGNED (integer_type_node)),
3505 TYPE_MODE (integer_type_node));
3510 else if (partial > 0)
3512 /* Scalar partly in registers. */
3514 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3517 /* # words of start of argument
3518 that we must make space for but need not store. */
3519 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3520 int args_offset = INTVAL (args_so_far);
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra && args_addr == 0
3527 && where_pad != none && where_pad != stack_direction)
3528 anti_adjust_stack (GEN_INT (extra));
3530 /* If we make space by pushing it, we might as well push
3531 the real data. Otherwise, we can leave OFFSET nonzero
3532 and leave the space uninitialized. */
3536 /* Now NOT_STACK gets the number of words that we don't need to
3537 allocate on the stack. */
3538 not_stack = partial - offset;
3540 /* If the partial register-part of the arg counts in its stack size,
3541 skip the part of stack space corresponding to the registers.
3542 Otherwise, start copying to the beginning of the stack space,
3543 by setting SKIP to 0. */
3544 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3546 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3547 x = validize_mem (force_const_mem (mode, x));
3549 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3550 SUBREGs of such registers are not allowed. */
3551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3552 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3553 x = copy_to_reg (x);
3555 /* Loop over all the words allocated on the stack for this arg. */
3556 /* We can do it by words, because any scalar bigger than a word
3557 has a size that is a multiple of a word. */
3558 #ifndef PUSH_ARGS_REVERSED
3559 for (i = not_stack; i < size; i++)
3561 for (i = size - 1; i >= not_stack; i--)
3563 if (i >= not_stack + offset)
3564 emit_push_insn (operand_subword_force (x, i, mode),
3565 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3567 GEN_INT (args_offset + ((i - not_stack + skip)
3569 reg_parm_stack_space, alignment_pad);
3574 rtx target = NULL_RTX;
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 #ifdef PUSH_ROUNDING
3585 if (args_addr == 0 && PUSH_ARGS)
3586 emit_single_push_insn (mode, x, type);
3590 if (GET_CODE (args_so_far) == CONST_INT)
3592 = memory_address (mode,
3593 plus_constant (args_addr,
3594 INTVAL (args_so_far)));
3596 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3599 dest = gen_rtx_MEM (mode, addr);
3602 set_mem_attributes (dest, type, 1);
3603 /* Function incoming arguments may overlap with sibling call
3604 outgoing arguments and we cannot allow reordering of reads
3605 from function arguments with stores to outgoing arguments
3606 of sibling calls. */
3607 set_mem_alias_set (dest, 0);
3610 emit_move_insn (dest, x);
3614 if (current_function_check_memory_usage && ! in_check_memory_usage)
3616 in_check_memory_usage = 1;
3618 target = get_push_address (GET_MODE_SIZE (mode));
3620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3621 emit_library_call (chkr_copy_bitmap_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, XEXP (x, 0), Pmode,
3624 GEN_INT (GET_MODE_SIZE (mode)),
3625 TYPE_MODE (sizetype));
3627 emit_library_call (chkr_set_right_libfunc,
3628 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3629 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3630 TYPE_MODE (sizetype),
3631 GEN_INT (MEMORY_USE_RW),
3632 TYPE_MODE (integer_type_node));
3633 in_check_memory_usage = 0;
3638 /* If part should go in registers, copy that part
3639 into the appropriate registers. Do this now, at the end,
3640 since mem-to-mem copies above may do function calls. */
3641 if (partial > 0 && reg != 0)
3643 /* Handle calls that pass values in multiple non-contiguous locations.
3644 The Irix 6 ABI has examples of this. */
3645 if (GET_CODE (reg) == PARALLEL)
3646 emit_group_load (reg, x, -1, align); /* ??? size? */
3648 move_block_to_reg (REGNO (reg), x, partial, mode);
3651 if (extra && args_addr == 0 && where_pad == stack_direction)
3652 anti_adjust_stack (GEN_INT (extra));
3654 if (alignment_pad && args_addr == 0)
3655 anti_adjust_stack (alignment_pad);
3658 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3666 /* Only registers can be subtargets. */
3667 || GET_CODE (x) != REG
3668 /* If the register is readonly, it can't be set more than once. */
3669 || RTX_UNCHANGING_P (x)
3670 /* Don't use hard regs to avoid extending their life. */
3671 || REGNO (x) < FIRST_PSEUDO_REGISTER
3672 /* Avoid subtargets inside loops,
3673 since they hide some invariant expressions. */
3674 || preserve_subexpressions_p ())
3678 /* Expand an assignment that stores the value of FROM into TO.
3679 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3680 (This may contain a QUEUED rtx;
3681 if the value is constant, this rtx is a constant.)
3682 Otherwise, the returned value is NULL_RTX.
3684 SUGGEST_REG is no longer actually used.
3685 It used to mean, copy the value through a register
3686 and return that register, if that is possible.
3687 We now use WANT_VALUE to decide whether to do this. */
3690 expand_assignment (to, from, want_value, suggest_reg)
3693 int suggest_reg ATTRIBUTE_UNUSED;
3698 /* Don't crash if the lhs of the assignment was erroneous. */
3700 if (TREE_CODE (to) == ERROR_MARK)
3702 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3703 return want_value ? result : NULL_RTX;
3706 /* Assignment of a structure component needs special treatment
3707 if the structure component's rtx is not simply a MEM.
3708 Assignment of an array element at a constant index, and assignment of
3709 an array element in an unaligned packed structure field, have the same problem. */
3712 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3713 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3715 enum machine_mode mode1;
3716 HOST_WIDE_INT bitsize, bitpos;
3721 unsigned int alignment;
3724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3725 &unsignedp, &volatilep, &alignment);
3727 /* If we are going to use store_bit_field and extract_bit_field,
3728 make sure to_rtx will be safe for multiple use. */
3730 if (mode1 == VOIDmode && want_value)
3731 tem = stabilize_reference (tem);
3733 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3738 if (GET_CODE (to_rtx) != MEM)
3741 if (GET_MODE (offset_rtx) != ptr_mode)
3742 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3744 #ifdef POINTERS_EXTEND_UNSIGNED
3745 if (GET_MODE (offset_rtx) != Pmode)
3746 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3749 /* A constant address in TO_RTX can have VOIDmode; we must not try
3750 to call force_reg for that case, so avoid it. */
3751 if (GET_CODE (to_rtx) == MEM
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && alignment == GET_MODE_ALIGNMENT (mode1))
3760 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3762 if (GET_CODE (XEXP (temp, 0)) == REG)
3765 to_rtx = (replace_equiv_address
3766 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3771 to_rtx = offset_address (to_rtx, offset_rtx,
3772 highest_pow2_factor (offset));
3777 if (GET_CODE (to_rtx) == MEM)
3779 /* When the offset is zero, to_rtx is the address of the
3780 structure we are storing into, and hence may be shared.
3781 We must make a new MEM before setting the volatile bit. */
3783 to_rtx = copy_rtx (to_rtx);
3785 MEM_VOLATILE_P (to_rtx) = 1;
3787 #if 0 /* This was turned off because, when a field is volatile
3788 in an object which is not volatile, the object may be in a register,
3789 and then we would abort over here. */
3795 if (TREE_CODE (to) == COMPONENT_REF
3796 && TREE_READONLY (TREE_OPERAND (to, 1)))
3799 to_rtx = copy_rtx (to_rtx);
3801 RTX_UNCHANGING_P (to_rtx) = 1;
3804 /* Check the access. */
3805 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3810 enum machine_mode best_mode;
3812 best_mode = get_best_mode (bitsize, bitpos,
3813 TYPE_ALIGN (TREE_TYPE (tem)),
3815 if (best_mode == VOIDmode)
3818 best_mode_size = GET_MODE_BITSIZE (best_mode);
3819 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3820 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3821 size *= GET_MODE_SIZE (best_mode);
3823 /* Check the access right of the pointer. */
3824 in_check_memory_usage = 1;
3826 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3827 VOIDmode, 3, to_addr, Pmode,
3828 GEN_INT (size), TYPE_MODE (sizetype),
3829 GEN_INT (MEMORY_USE_WO),
3830 TYPE_MODE (integer_type_node));
3831 in_check_memory_usage = 0;
3834 /* If this is a varying-length object, we must get the address of
3835 the source and do an explicit block move. */
3838 unsigned int from_align;
3839 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3841 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3843 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3851 if (! can_address_p (to))
3853 to_rtx = copy_rtx (to_rtx);
3854 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3857 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3859 /* Spurious cast for HPUX compiler. */
3860 ? ((enum machine_mode)
3861 TYPE_MODE (TREE_TYPE (to)))
3865 int_size_in_bytes (TREE_TYPE (tem)),
3866 get_alias_set (to));
3868 preserve_temp_slots (result);
3872 /* If the value is meaningful, convert RESULT to the proper mode.
3873 Otherwise, return nothing. */
3874 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3875 TYPE_MODE (TREE_TYPE (from)),
3877 TREE_UNSIGNED (TREE_TYPE (to)))
3882 /* If the rhs is a function call and its value is not an aggregate,
3883 call the function before we start to compute the lhs.
3884 This is needed for correct code for cases such as
3885 val = setjmp (buf) on machines where reference to val
3886 requires loading up part of an address in a separate insn.
3888 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3889 since it might be a promoted variable where the zero- or sign- extension
3890 needs to be done. Handling this in the normal way is safe because no
3891 computation is done before the call. */
3892 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3894 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3895 && GET_CODE (DECL_RTL (to)) == REG))
3900 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3904 /* Handle calls that return values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3908 TYPE_ALIGN (TREE_TYPE (from)));
3909 else if (GET_MODE (to_rtx) == BLKmode)
3910 emit_block_move (to_rtx, value, expr_size (from));
3913 #ifdef POINTERS_EXTEND_UNSIGNED
3914 if (POINTER_TYPE_P (TREE_TYPE (to))
3915 && GET_MODE (to_rtx) != GET_MODE (value))
3916 value = convert_memory_address (GET_MODE (to_rtx), value);
3918 emit_move_insn (to_rtx, value);
3920 preserve_temp_slots (to_rtx);
3923 return want_value ? to_rtx : NULL_RTX;
3926 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3927 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3930 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3932 /* Don't move directly into a return register. */
3933 if (TREE_CODE (to) == RESULT_DECL
3934 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3939 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3941 if (GET_CODE (to_rtx) == PARALLEL)
3942 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3943 TYPE_ALIGN (TREE_TYPE (from)));
3945 emit_move_insn (to_rtx, temp);
3947 preserve_temp_slots (to_rtx);
3950 return want_value ? to_rtx : NULL_RTX;
3953 /* In case we are returning the contents of an object which overlaps
3954 the place the value is being stored, use a safe function when copying
3955 a value through a pointer into a structure value return block. */
3956 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3957 && current_function_returns_struct
3958 && !current_function_returns_pcc_struct)
3963 size = expr_size (from);
3964 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3965 EXPAND_MEMORY_USE_DONT);
3967 /* Copy the rights of the bitmap. */
3968 if (current_function_check_memory_usage)
3969 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3970 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3971 XEXP (from_rtx, 0), Pmode,
3972 convert_to_mode (TYPE_MODE (sizetype),
3973 size, TREE_UNSIGNED (sizetype)),
3974 TYPE_MODE (sizetype));
3976 #ifdef TARGET_MEM_FUNCTIONS
3977 emit_library_call (memmove_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3979 XEXP (from_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (sizetype),
3981 size, TREE_UNSIGNED (sizetype)),
3982 TYPE_MODE (sizetype));
3984 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3985 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3986 XEXP (to_rtx, 0), Pmode,
3987 convert_to_mode (TYPE_MODE (integer_type_node),
3988 size, TREE_UNSIGNED (integer_type_node)),
3989 TYPE_MODE (integer_type_node));
3992 preserve_temp_slots (to_rtx);
3995 return want_value ? to_rtx : NULL_RTX;
3998 /* Compute FROM and store the value in the rtx we got. */
4001 result = store_expr (from, to_rtx, want_value);
4002 preserve_temp_slots (result);
4005 return want_value ? result : NULL_RTX;
4008 /* Generate code for computing expression EXP,
4009 and storing the value into TARGET.
4010 TARGET may contain a QUEUED rtx.
4012 If WANT_VALUE is nonzero, return a copy of the value
4013 not in TARGET, so that we can be sure to use the proper
4014 value in a containing expression even if TARGET has something
4015 else stored in it. If possible, we copy the value through a pseudo
4016 and return that pseudo. Or, if the value is constant, we try to
4017 return the constant. In some cases, we return a pseudo
4018 copied *from* TARGET.
4020 If the mode is BLKmode then we may return TARGET itself.
4021 It turns out that in BLKmode it doesn't cause a problem,
4022 because C has no operators that could combine two different
4023 assignments into the same BLKmode object with different values
4024 with no sequence point. Will other languages need this to be more careful as well?
4027 If WANT_VALUE is 0, we return NULL, to make sure
4028 to catch quickly any cases where the caller uses the value
4029 and fails to set WANT_VALUE. */
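/* Usage sketch (hypothetical): expand the rhs tree RHS directly into
   the rtx already computed for the lhs, keeping the value for an
   enclosing expression:

     rtx val = store_expr (rhs, lhs_rtx, 1);

   where the final argument is WANT_VALUE.  */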
4032 store_expr (exp, target, want_value)
4038 int dont_return_target = 0;
4039 int dont_store_target = 0;
4041 if (TREE_CODE (exp) == COMPOUND_EXPR)
4043 /* Perform first part of compound expression, then assign from second part. */
4045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4047 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4049 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4051 /* For conditional expression, get safe form of the target. Then
4052 test the condition, doing the appropriate assignment on either
4053 side. This avoids the creation of unnecessary temporaries.
4054 For non-BLKmode, it is more efficient not to do this. */
4056 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059 target = protect_from_queue (target, 1);
4061 do_pending_stack_adjust ();
4063 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 1), target, 0);
4066 end_cleanup_deferral ();
4068 emit_jump_insn (gen_jump (lab2));
4071 start_cleanup_deferral ();
4072 store_expr (TREE_OPERAND (exp, 2), target, 0);
4073 end_cleanup_deferral ();
4078 return want_value ? target : NULL_RTX;
4080 else if (queued_subexp_p (target))
4081 /* If target contains a postincrement, let's not risk
4082 using it as the place to generate the rhs. */
4084 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4086 /* Expand EXP into a new pseudo. */
4087 temp = gen_reg_rtx (GET_MODE (target));
4088 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4091 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4093 /* If target is volatile, ANSI requires accessing the value
4094 *from* the target, if it is accessed. So make that happen.
4095 In no case return the target itself. */
4096 if (! MEM_VOLATILE_P (target) && want_value)
4097 dont_return_target = 1;
4099 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4100 && GET_MODE (target) != BLKmode)
4101 /* If target is in memory and caller wants value in a register instead,
4102 arrange that. Pass TARGET as target for expand_expr so that,
4103 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4104 We know expand_expr will not use the target in that case.
4105 Don't do this if TARGET is volatile because we are supposed
4106 to write it and then read it. */
4108 temp = expand_expr (exp, target, GET_MODE (target), 0);
4109 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4111 /* If TEMP is already in the desired TARGET, only copy it from
4112 memory and don't store it there again. */
4114 || (rtx_equal_p (temp, target)
4115 && ! side_effects_p (temp) && ! side_effects_p (target)))
4116 dont_store_target = 1;
4117 temp = copy_to_reg (temp);
4119 dont_return_target = 1;
4121 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4122 /* If this is a scalar in a register that is stored in a wider mode
4123 than the declared mode, compute the result into its declared mode
4124 and then convert to the wider mode. Our value is the computed
4127 /* If we don't want a value, we can do the conversion inside EXP,
4128 which will often result in some optimizations. Do the conversion
4129 in two steps: first change the signedness, if needed, then
4130 the extend. But don't do this if the type of EXP is a subtype
4131 of something else since then the conversion might involve
4132 more than just converting modes. */
4133 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4134 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4136 if (TREE_UNSIGNED (TREE_TYPE (exp))
4137 != SUBREG_PROMOTED_UNSIGNED_P (target))
4140 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4144 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4145 SUBREG_PROMOTED_UNSIGNED_P (target)),
4149 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4151 /* If TEMP is a volatile MEM and we want a result value, make
4152 the access now so it gets done only once. Likewise if
4153 it contains TARGET. */
4154 if (GET_CODE (temp) == MEM && want_value
4155 && (MEM_VOLATILE_P (temp)
4156 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4157 temp = copy_to_reg (temp);
4159 /* If TEMP is a VOIDmode constant, use convert_modes to make
4160 sure that we properly convert it. */
4161 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4163 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4164 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4165 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4166 GET_MODE (target), temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 convert_move (SUBREG_REG (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4173 /* If we promoted a constant, change the mode back down to match
4174 target. Otherwise, the caller might get confused by a result whose
4175 mode is larger than expected. */
4177 if (want_value && GET_MODE (temp) != GET_MODE (target)
4178 && GET_MODE (temp) != VOIDmode)
4180 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4181 SUBREG_PROMOTED_VAR_P (temp) = 1;
4182 SUBREG_PROMOTED_UNSIGNED_P (temp)
4183 = SUBREG_PROMOTED_UNSIGNED_P (target);
4186 return want_value ? temp : NULL_RTX;
4190 temp = expand_expr (exp, target, GET_MODE (target), 0);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || want_value))
4203 dont_return_target = 1;
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4216 if (current_function_check_memory_usage
4217 && GET_CODE (target) == MEM
4218 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4220 in_check_memory_usage = 1;
4221 if (GET_CODE (temp) == MEM)
4222 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4223 VOIDmode, 3, XEXP (target, 0), Pmode,
4224 XEXP (temp, 0), Pmode,
4225 expr_size (exp), TYPE_MODE (sizetype));
4227 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4228 VOIDmode, 3, XEXP (target, 0), Pmode,
4229 expr_size (exp), TYPE_MODE (sizetype),
4230 GEN_INT (MEMORY_USE_WO),
4231 TYPE_MODE (integer_type_node));
4232 in_check_memory_usage = 0;
4235 /* If value was not generated in the target, store it there.
4236 Convert the value to TARGET's type first if necessary. */
4237 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4238 one or both of them are volatile memory refs, we have to distinguish
4240 - expand_expr has used TARGET. In this case, we must not generate
4241 another copy. This can be detected by TARGET being equal according to ==.
4243 - expand_expr has not used TARGET - that means that the source just
4244 happens to have the same RTX form. Since temp will have been created
4245 by expand_expr, it will compare unequal according to == .
4246 We must generate a copy in this case, to reach the correct number
4247 of volatile memory references. */
4249 if ((! rtx_equal_p (temp, target)
4250 || (temp != target && (side_effects_p (temp)
4251 || side_effects_p (target))))
4252 && TREE_CODE (exp) != ERROR_MARK
4253 && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
4272 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
4283 if (GET_CODE (size) == CONST_INT
4284 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4285 emit_block_move (target, temp, size);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;
4297 /* Copy that much. */
4298 emit_block_move (target, temp, copy_size_rtx);
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4303 addr = XEXP (target, 0);
4304 addr = convert_modes (ptr_mode, Pmode, addr, 1);
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, -TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}
4327 if (size != const0_rtx)
4329 rtx dest = gen_rtx_MEM (BLKmode, addr);
4331 MEM_COPY_ATTRIBUTES (dest, target);
4333 /* Be sure we can write on ADDR. */
4334 in_check_memory_usage = 1;
4335 if (current_function_check_memory_usage)
4336 emit_library_call (chkr_check_addr_libfunc,
				       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				       XEXP (dest, 0), Pmode,
				       size, TYPE_MODE (sizetype),
4340 GEN_INT (MEMORY_USE_WO),
4341 TYPE_MODE (integer_type_node));
4342 in_check_memory_usage = 0;
		  clear_storage (dest, size);
		}

	      if (label)
		emit_label (label);
	    }
	}
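/* Editor's note: a worked example of the copy-then-clear logic above.
   For an 8-byte array initialized from a 3-byte string constant
   (two characters plus the NUL), the expander emits the equivalent of
   the following; the helper and names are hypothetical, not part of
   this file:  */
#if 0
#include <string.h>

static void
init_buf (char buf[8])
{
  memcpy (buf, "hi", 3);       /* emit_block_move of copy_size bytes */
  memset (buf + 3, 0, 8 - 3);  /* clear_storage of the remainder */
}
#endif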
4350 /* Handle calls that return values in multiple non-contiguous locations.
4351 The Irix 6 ABI has examples of this. */
4352 else if (GET_CODE (target) == PARALLEL)
4353 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4355 else if (GET_MODE (temp) == BLKmode)
4356 emit_block_move (target, temp, expr_size (exp));
4358 emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;
4365 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4366 ??? The latter test doesn't seem to make sense. */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;
4370 /* Return TARGET itself if it is a hard register. */
4371 else if (want_value && GET_MODE (target) != BLKmode
4372 && ! (GET_CODE (target) == REG
4373 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
4380 /* Return 1 if EXP just contains zeros. */
static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;
      return 1;

    default:
      return 0;
    }
}
4419 /* Return 1 if EXP contains mostly (3/4) zeros. */
static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     and since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
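/* Editor's note: the 4 * zeros >= 3 * elts test above implements the
   "at least 3/4 zero" threshold in integer arithmetic, avoiding any
   division.  A standalone equivalent for a plain array (illustrative
   only, not part of GCC):  */
#if 0
static int
mostly_zeros (const int *v, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;

  /* E.g. {0, 0, 0, 5}: 4 * 3 >= 3 * 4 holds, so "mostly zero".  */
  return 4 * zeros >= 3 * n;
}
#endif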
4452 /* Helper function for store_constructor.
4453 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4454 TYPE is the type of the CONSTRUCTOR, not the element type.
4455 ALIGN and CLEARED are as for store_constructor.
4456 ALIAS_SET is the alias set to use for any stores.
4458 This provides a recursive shortcut back to store_constructor when it isn't
4459 necessary to go through store_field. This is so that we can pass through
4460 the cleared field to let store_constructor know that we may not have to
4461 clear a substructure if the outer structure has already been cleared. */
static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, align, cleared, alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     unsigned int align;
     int cleared;
     int alias_set;
{
4475 if (TREE_CODE (exp) == CONSTRUCTOR
4476 && bitpos % BITS_PER_UNIT == 0
4477 /* If we have a non-zero bitpos for a register target, then we just
4478 let store_field do the bitfield handling. This is unlikely to
4479 generate unnecessary clear instructions anyways. */
4480 && (bitpos == 0 || GET_CODE (target) == MEM))
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4491 /* Show the alignment may no longer be what it was and update the alias
4492 set, if required. */
4494 align = MIN (align, (unsigned int) bitpos & - bitpos);
4496 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4497 && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
		 int_size_in_bytes (type), alias_set);
}
4510 /* Store the value of constructor EXP into the rtx TARGET.
4511 TARGET is either a REG or a MEM.
4512 ALIGN is the maximum known alignment for TARGET.
4513 CLEARED is true if TARGET is known to have been zero'd.
4514 SIZE is the number of bytes of TARGET we are allowed to modify: this
4515 may not be the same as the size of EXP if we are assigning to a field
4516 which has been packed to exclude padding bits. */
static void
store_constructor (exp, target, align, cleared, size)
     tree exp;
     rtx target;
     unsigned int align;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4531 /* We know our target cannot conflict, since safe_from_p has been called. */
4533 /* Don't try copying piece by piece into a hard register
4534 since that is vulnerable to being clobbered by EXP.
4535 Instead, construct in a pseudo register and then copy it all. */
4536 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, align, cleared, size);
      emit_move_insn (target, temp);
      return;
    }
4545 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4546 || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* Inform later passes that the whole union value is dead.  */
4551 if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4557 /* If the constructor is empty, clear the union. */
4558 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4559 clear_storage (target, expr_size (exp));
4562 /* If we are building a static constructor into a register,
4563 set the initial value as zero so we can fold the value into
4564 a constant. But if more than one register is involved,
4565 this probably loses. */
4566 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4567 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}
4575 /* If the constructor has fewer fields than the structure
4576 or if we are initializing the structure to mostly zeros,
4577 clear the whole structure first. Don't do this if TARGET is a
4578 register whose mode size isn't equal to SIZE since clear_storage
4579 can't handle this case. */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
#ifdef WORD_REGISTER_OPERATIONS
	  tree value = TREE_VALUE (elt);
#endif
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  int unsignedp;
	  tree offset;
	  rtx to_rtx = target;
	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;
4621 if (host_integerp (DECL_SIZE (field), 1))
4622 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4626 unsignedp = TREE_UNSIGNED (field);
4627 mode = DECL_MODE (field);
4628 if (DECL_BIT_FIELD (field))
4631 offset = DECL_FIELD_OFFSET (field);
4632 if (host_integerp (offset, 0)
4633 && host_integerp (bit_position (field), 0))
4635 bitpos = int_bit_position (field);
4639 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4645 if (contains_placeholder_p (offset))
4646 offset = build (WITH_RECORD_EXPR, sizetype,
4647 offset, make_tree (TREE_TYPE (exp), target));
4649 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4650 if (GET_CODE (to_rtx) != MEM)
4653 if (GET_MODE (offset_rtx) != ptr_mode)
4654 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (offset_rtx) != Pmode)
4658 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4661 to_rtx = offset_address (to_rtx, offset_rtx,
4662 highest_pow2_factor (offset));
4664 align = DECL_OFFSET_ALIGN (field);
4667 if (TREE_READONLY (field))
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
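/* Editor's note: a numeric sketch of the widening above, assuming a
   32-bit word and an 8-bit field at the start of the word on a
   big-endian target.  The shift puts the narrow constant where a
   narrow big-endian store would have placed it (hypothetical helper,
   not part of this file):  */
#if 0
static unsigned int
widen_be (unsigned int value, int bitsize)
{
  /* 0x12 shifted left by 32 - 8 = 24 gives 0x12000000.  */
  return value << (32 - bitsize);
}
#endif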
4703 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4704 && DECL_NONADDRESSABLE_P (field))
4706 to_rtx = copy_rtx (to_rtx);
4707 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4710 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4711 TREE_VALUE (elt), type, align, cleared,
4712 get_alias_set (TREE_TYPE (field)));
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
4721 tree elttype = TREE_TYPE (type);
4722 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4723 && TYPE_MAX_VALUE (domain)
4724 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4725 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4726 HOST_WIDE_INT minelt = 0;
4727 HOST_WIDE_INT maxelt = 0;
4729 /* If we have constant bounds for the range of the type, get them. */
4732 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4733 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
4744 need_to_clear = ! const_bounds_p;
4746 /* This loop is a more accurate version of the loop in
4747 mostly_zeros_p (it handles RANGE_EXPR in an index).
4748 It is also needed to check for missing elements. */
4749 for (elt = CONSTRUCTOR_ELTS (exp);
4750 elt != NULL_TREE && ! need_to_clear;
4751 elt = TREE_CHAIN (elt))
4753 tree index = TREE_PURPOSE (elt);
4754 HOST_WIDE_INT this_node_count;
4756 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4758 tree lo_index = TREE_OPERAND (index, 0);
4759 tree hi_index = TREE_OPERAND (index, 1);
4761 if (! host_integerp (lo_index, 1)
4762 || ! host_integerp (hi_index, 1))
4768 this_node_count = (tree_low_cst (hi_index, 1)
4769 - tree_low_cst (lo_index, 1) + 1);
4772 this_node_count = 1;
4774 count += this_node_count;
4775 if (mostly_zeros_p (TREE_VALUE (elt)))
4776 zero_count += this_node_count;
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}
4786 if (need_to_clear && size > 0)
4789 clear_storage (target, GEN_INT (size));
4792 else if (REG_P (target))
4793 /* Inform later passes that the old value is dead. */
4794 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4796 /* Store each element of the constructor into
4797 the corresponding element of TARGET, determined
4798 by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt = TREE_CHAIN (elt), i++)
4803 enum machine_mode mode;
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4807 tree value = TREE_VALUE (elt);
4808 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4809 tree index = TREE_PURPOSE (elt);
4810 rtx xtarget = target;
4812 if (cleared && is_zeros_p (value))
4815 unsignedp = TREE_UNSIGNED (elttype);
4816 mode = TYPE_MODE (elttype);
4817 if (mode == BLKmode)
4818 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4822 bitsize = GET_MODE_BITSIZE (mode);
4824 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4826 tree lo_index = TREE_OPERAND (index, 0);
4827 tree hi_index = TREE_OPERAND (index, 1);
4828 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4829 struct nesting *loop;
4830 HOST_WIDE_INT lo, hi, count;
4833 /* If the range is constant and "small", unroll the loop. */
4835 && host_integerp (lo_index, 0)
4836 && host_integerp (hi_index, 0)
4837 && (lo = tree_low_cst (lo_index, 0),
4838 hi = tree_low_cst (hi_index, 0),
4839 count = hi - lo + 1,
4840 (GET_CODE (target) != MEM
4842 || (host_integerp (TYPE_SIZE (elttype), 1)
4843 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4846 lo -= minelt; hi -= minelt;
4847 for (; lo <= hi; lo++)
4849 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4851 if (GET_CODE (target) == MEM
4852 && !MEM_KEEP_ALIAS_SET_P (target)
4853 && TYPE_NONALIASED_COMPONENT (type))
4855 target = copy_rtx (target);
4856 MEM_KEEP_ALIAS_SET_P (target) = 1;
4859 store_constructor_field
4860 (target, bitsize, bitpos, mode, value, type, align,
4861 cleared, get_alias_set (elttype));
4866 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4867 loop_top = gen_label_rtx ();
4868 loop_end = gen_label_rtx ();
4870 unsignedp = TREE_UNSIGNED (domain);
4872 index = build_decl (VAR_DECL, NULL_TREE, domain);
4875 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4877 SET_DECL_RTL (index, index_r);
4878 if (TREE_CODE (value) == SAVE_EXPR
4879 && SAVE_EXPR_RTL (value) == 0)
4881 /* Make sure value gets expanded once before the
4883 expand_expr (value, const0_rtx, VOIDmode, 0);
4886 store_expr (lo_index, index_r, 0);
4887 loop = expand_start_loop (0);
4889 /* Assign value to element index. */
4891 = convert (ssizetype,
4892 fold (build (MINUS_EXPR, TREE_TYPE (index),
4893 index, TYPE_MIN_VALUE (domain))));
4894 position = size_binop (MULT_EXPR, position,
4896 TYPE_SIZE_UNIT (elttype)));
4898 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4899 xtarget = offset_address (target, pos_rtx,
4900 highest_pow2_factor (position));
4901 xtarget = adjust_address (xtarget, mode, 0);
4902 if (TREE_CODE (value) == CONSTRUCTOR)
4903 store_constructor (value, xtarget, align, cleared,
4904 bitsize / BITS_PER_UNIT);
4906 store_expr (value, xtarget, 0);
4908 expand_exit_loop_if_false (loop,
4909 build (LT_EXPR, integer_type_node,
4912 expand_increment (build (PREINCREMENT_EXPR,
4914 index, integer_one_node), 0, 0);
4916 emit_label (loop_end);
4919 else if ((index != 0 && ! host_integerp (index, 0))
4920 || ! host_integerp (TYPE_SIZE (elttype), 1))
4925 index = ssize_int (1);
4928 index = convert (ssizetype,
4929 fold (build (MINUS_EXPR, index,
4930 TYPE_MIN_VALUE (domain))));
4932 position = size_binop (MULT_EXPR, index,
4934 TYPE_SIZE_UNIT (elttype)));
4935 xtarget = offset_address (target,
4936 expand_expr (position, 0, VOIDmode, 0),
4937 highest_pow2_factor (position));
4938 xtarget = adjust_address (xtarget, mode, 0);
4939 store_expr (value, xtarget, 0);
4944 bitpos = ((tree_low_cst (index, 0) - minelt)
4945 * tree_low_cst (TYPE_SIZE (elttype), 1));
4947 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4949 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4950 && TYPE_NONALIASED_COMPONENT (type))
4952 target = copy_rtx (target);
4953 MEM_KEEP_ALIAS_SET_P (target) = 1;
4956 store_constructor_field (target, bitsize, bitpos, mode, value,
4957 type, align, cleared,
4958 get_alias_set (elttype));
4964 /* Set constructor assignments. */
4965 else if (TREE_CODE (type) == SET_TYPE)
4967 tree elt = CONSTRUCTOR_ELTS (exp);
4968 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4969 tree domain = TYPE_DOMAIN (type);
4970 tree domain_min, domain_max, bitlength;
4972 /* The default implementation strategy is to extract the constant
4973 parts of the constructor, use that to initialize the target,
4974 and then "or" in whatever non-constant ranges we need in addition.
4976 If a large set is all zero or all ones, it is
4977 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset) and then set
	 the bits we want.  */
4982 /* Check for all zeros. */
      if (elt == NULL_TREE && size > 0)
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}
4990 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4991 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4992 bitlength = size_binop (PLUS_EXPR,
4993 size_diffop (domain_max, domain_min),
4996 nbits = tree_low_cst (bitlength, 1);
4998 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4999 are "complicated" (more than one range), initialize (the
5000 constant parts) by copying from a constant. */
5001 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5002 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5004 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5005 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5006 char *bit_buffer = (char *) alloca (nbits);
5007 HOST_WIDE_INT word = 0;
5008 unsigned int bit_pos = 0;
5009 unsigned int ibit = 0;
5010 unsigned int offset = 0; /* In bytes from beginning of set. */
5012 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
5017 if (BYTES_BIG_ENDIAN)
5018 word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
5026 if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
5031 /* The assumption here is that it is safe to use
5032 XEXP if the set is multi-word, but not if
5033 it's single-word. */
5034 if (GET_CODE (target) == MEM)
5035 to_rtx = adjust_address (target, mode, offset);
5036 else if (offset == 0)
			to_rtx = target;
		      else
			abort ();

		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
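/* Editor's note: the word-filling loop above can be pictured with this
   standalone sketch (assumed 32-bit set words; not part of GCC).  The
   BYTES_BIG_ENDIAN branch numbers bits from the most significant end
   of the word instead of the least significant:  */
#if 0
static unsigned int
pack_bit (unsigned int word, unsigned int bit_pos, int big_endian)
{
  const unsigned int set_word_size = 32;

  if (big_endian)
    return word | (1u << (set_word_size - 1 - bit_pos));
  else
    return word | (1u << bit_pos);
}
#endif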
5052 /* Don't bother clearing storage if the set is all ones. */
5053 if (TREE_CHAIN (elt) != NULL_TREE
5054 || (TREE_PURPOSE (elt) == NULL_TREE
5056 : ( ! host_integerp (TREE_VALUE (elt), 0)
5057 || ! host_integerp (TREE_PURPOSE (elt), 0)
5058 || (tree_low_cst (TREE_VALUE (elt), 0)
5059 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5060 != (HOST_WIDE_INT) nbits))))
5061 clear_storage (target, expr_size (exp));
5063 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5065 /* Start of range of element or NULL. */
5066 tree startbit = TREE_PURPOSE (elt);
5067 /* End of range of element, or element value. */
5068 tree endbit = TREE_VALUE (elt);
5069 #ifdef TARGET_MEM_FUNCTIONS
5070 HOST_WIDE_INT startb, endb;
5072 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5074 bitlength_rtx = expand_expr (bitlength,
5075 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5077 /* Handle non-range tuple element like [ expr ]. */
5078 if (startbit == NULL_TREE)
5080 startbit = save_expr (endbit);
5084 startbit = convert (sizetype, startbit);
5085 endbit = convert (sizetype, endbit);
5086 if (! integer_zerop (domain_min))
5088 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5089 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5091 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5092 EXPAND_CONST_ADDRESS);
5093 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5094 EXPAND_CONST_ADDRESS);
5100 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5103 emit_move_insn (targetx, target);
5106 else if (GET_CODE (target) == MEM)
5111 #ifdef TARGET_MEM_FUNCTIONS
5112 /* Optimization: If startbit and endbit are
5113 constants divisible by BITS_PER_UNIT,
5114 call memset instead. */
5115 if (TREE_CODE (startbit) == INTEGER_CST
5116 && TREE_CODE (endbit) == INTEGER_CST
5117 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5118 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5132 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5133 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5134 startbit_rtx, TYPE_MODE (sizetype),
5135 endbit_rtx, TYPE_MODE (sizetype));
	  if (GET_CODE (target) == REG)
	    emit_move_insn (target, targetx);
	}
    }
  else
    abort ();
}
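/* Editor's note: a worked example for the memset shortcut above.  A
   range covering bits 8..23 of the set has startb == 8 and endb == 24,
   both divisible by BITS_PER_UNIT (8), so instead of calling __setbits
   the compiler emits the equivalent of this sketch (hypothetical
   helper, not part of this file):  */
#if 0
#include <string.h>

static void
set_range (unsigned char *set)
{
  memset (set + 8 / 8, -1, (24 - 8) / 8);	/* bytes 1 and 2 */
}
#endif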
5146 /* Store the value of EXP (an expression tree)
5147 into a subfield of TARGET which has mode MODE and occupies
5148 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5149 If MODE is VOIDmode, it means that we are storing into a bit-field.
5151 If VALUE_MODE is VOIDmode, return nothing in particular.
5152 UNSIGNEDP is not used in this case.
5154 Otherwise, return an rtx for the value stored. This rtx
5155 has mode VALUE_MODE if that is convenient to do.
5156 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5158 ALIGN is the alignment that TARGET is known to have.
5159 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5161 ALIAS_SET is the alias set for the destination. This value will
5162 (in general) be different from that for TARGET, since TARGET is a
5163 reference to the containing structure. */
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     unsigned int align;
     HOST_WIDE_INT total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;
  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5189 if (bitsize < HOST_BITS_PER_WIDE_INT)
5190 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
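/* Editor's note: for bitsize == 5, width_mask above becomes
   (1 << 5) - 1 == 0x1f, a mask of the five low-order bits.  The
   HOST_BITS_PER_WIDE_INT guard matters because shifting by the full
   width of the type is undefined behavior in C.  Standalone sketch:  */
#if 0
static unsigned long
low_bits_mask (int bitsize)	/* assumes 0 < bitsize < bits in long */
{
  return (1ul << bitsize) - 1;	/* e.g. 5 -> 0x1f */
}
#endif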
5192 /* If we are storing into an unaligned field of an aligned union that is
5193 in a register, we may have the mode of TARGET being an integer mode but
5194 MODE == BLKmode. In that case, get an aligned object whose size and
5195 alignment are the same as TARGET and store TARGET into it (we can avoid
5196 the store if the field being stored is the entire width of TARGET). Then
5197 call ourselves recursively to store the field into a BLKmode version of
5198 that object. Finally, load from the object into TARGET. This is not
5199 very efficient in general, but should only be slightly more expensive
5200 than the otherwise-required unaligned accesses. Perhaps this can be
5201 cleaned up later. */
5204 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5208 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5211 rtx blk_object = copy_rtx (object);
5213 PUT_MODE (blk_object, BLKmode);
5215 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5216 emit_move_insn (object, target);
5218 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5219 align, total_size, alias_set);
5221 /* Even though we aren't returning target, we need to
5222 give it the updated value. */
5223 emit_move_insn (target, object);
5228 if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }
5237 /* If the structure is in a register or if the component
5238 is a bit field, we cannot use addressing to access it.
5239 Use bit-field techniques or SUBREG to store in it. */
5241 if (mode == VOIDmode
5242 || (mode != BLKmode && ! direct_store[(int) mode]
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5245 || GET_CODE (target) == REG
5246 || GET_CODE (target) == SUBREG
5247 /* If the field isn't aligned enough to store as an ordinary memref,
5248 store it as a bit field. */
5249 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5250 && (align < GET_MODE_ALIGNMENT (mode)
5251 || bitpos % GET_MODE_ALIGNMENT (mode)))
5252 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5253 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5254 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5255 /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5262 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is a
5267 big-endian machine, we want the upper BITSIZE bits. */
5268 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5269 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5270 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5271 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
5278 if (mode != VOIDmode && mode != BLKmode
5279 && mode != TYPE_MODE (TREE_TYPE (exp)))
5280 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5287 unsigned int exp_align = expr_align (exp);
5289 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5290 || bitpos % BITS_PER_UNIT != 0)
5293 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5295 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5296 align = MIN (exp_align, align);
5298 /* Find an alignment that is consistent with the bit position. */
5299 while ((bitpos % align) != 0)
5302 emit_block_move (target, temp,
5303 bitsize == -1 ? expr_size (exp)
5304 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5307 return value_mode == VOIDmode ? const0_rtx : target;
5310 /* Store the value in the bitfield. */
5311 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5312 if (value_mode != VOIDmode)
5314 /* The caller wants an rtx for the value. */
5315 /* If possible, avoid refetching from the bitfield itself. */
5317 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5320 enum machine_mode tmode;
5323 return expand_and (temp,
5327 GET_MODE (temp) == VOIDmode
5329 : GET_MODE (temp))), NULL_RTX);
5330 tmode = GET_MODE (temp);
5331 if (tmode == VOIDmode)
5333 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5334 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5335 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5337 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5338 NULL_RTX, value_mode, 0, align,
5345 rtx addr = XEXP (target, 0);
5348 /* If a value is wanted, it must be the lhs;
5349 so make the address stable for multiple use. */
5351 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5352 && ! CONSTANT_ADDRESS_P (addr)
5353 /* A frame-pointer reference is already stable. */
5354 && ! (GET_CODE (addr) == PLUS
5355 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5356 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5357 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5358 target = replace_equiv_address (target, copy_to_reg (addr));
5360 /* Now build a reference to just the desired component. */
5362 to_rtx = copy_rtx (adjust_address (target, mode,
5363 bitpos / BITS_PER_UNIT));
5365 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5366 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5368 to_rtx = copy_rtx (to_rtx);
5369 set_mem_alias_set (to_rtx, alias_set);
      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
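/* Editor's note: the LSHIFT/RSHIFT pair used above when refetching a
   signed bit-field is the classical shift trick for extending an N-bit
   value sitting in the low bits of a word.  A C sketch (it relies on an
   arithmetic right shift of negative values, which is
   implementation-defined in C but holds on GCC targets):  */
#if 0
static int
sign_extend_field (int word, int bits)	/* assumes 0 < bits < 32 */
{
  int count = 32 - bits;

  return (word << count) >> count;	/* e.g. 5-bit 0x1d -> -3 */
}
#endif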
5376 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5377 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5378 codes and find the ultimate containing object, which we return.
5380 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5381 bit position, and *PUNSIGNEDP to the signedness of the field.
5382 If the position of the field is variable, we store a tree
5383 giving the variable offset (in units) in *POFFSET.
5384 This offset is in addition to the bit position.
5385 If the position is not variable, we store 0 in *POFFSET.
5386 We set *PALIGNMENT to the alignment of the address that will be
5387 computed. This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.
5390 If any of the extraction expressions is volatile,
5391 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5393 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5394 is a mode that can be used to access the field. In that case, *PBITSIZE
5397 If the field describes a variable-sized object, *PMODE is set to
5398 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5399 this case, but the address of the object can be found. */
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     unsigned int *palignment;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;
  tree placeholder_ptr = 0;
  tree tem;
5421 /* First get the mode, signedness, and size. We do this from just the
5422 outermost expression. */
5423 if (TREE_CODE (exp) == COMPONENT_REF)
5425 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5426 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5427 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5429 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5431 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5433 size_tree = TREE_OPERAND (exp, 1);
5434 *punsignedp = TREE_UNSIGNED (exp);
5438 mode = TYPE_MODE (TREE_TYPE (exp));
5439 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5441 if (mode == BLKmode)
5442 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5444 *pbitsize = GET_MODE_BITSIZE (mode);
5449 if (! host_integerp (size_tree, 1))
5450 mode = BLKmode, *pbitsize = -1;
5452 *pbitsize = tree_low_cst (size_tree, 1);
5455 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5456 and find the ultimate containing object. */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
5460 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5461 else if (TREE_CODE (exp) == COMPONENT_REF)
5463 tree field = TREE_OPERAND (exp, 1);
5464 tree this_offset = DECL_FIELD_OFFSET (field);
5466 /* If this field hasn't been filled in yet, don't go
5467 past it. This should only happen when folding expressions
5468 made during type construction. */
5469 if (this_offset == 0)
5471 else if (! TREE_CONSTANT (this_offset)
5472 && contains_placeholder_p (this_offset))
5473 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5475 offset = size_binop (PLUS_EXPR, offset, this_offset);
5476 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5477 DECL_FIELD_BIT_OFFSET (field));
5479 if (! host_integerp (offset, 0))
5480 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5483 else if (TREE_CODE (exp) == ARRAY_REF
5484 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5486 tree index = TREE_OPERAND (exp, 1);
5487 tree array = TREE_OPERAND (exp, 0);
5488 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5489 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5490 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5492 /* We assume all arrays have sizes that are a multiple of a byte.
5493 First subtract the lower bound, if any, in the type of the
	 index, then convert to sizetype and multiply by the size of the
	 array element.  */
      if (low_bound != 0 && ! integer_zerop (low_bound))
	index = fold (build (MINUS_EXPR, TREE_TYPE (index),
			     index, low_bound));
5500 /* If the index has a self-referential type, pass it to a
	 WITH_RECORD_EXPR; if the component size is self-referential, pass
	 our component to one.  */
5503 if (! TREE_CONSTANT (index)
5504 && contains_placeholder_p (index))
5505 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5506 if (! TREE_CONSTANT (unit_size)
5507 && contains_placeholder_p (unit_size))
5508 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5510 offset = size_binop (PLUS_EXPR, offset,
5511 size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}
5516 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5518 tree new = find_placeholder (exp, &placeholder_ptr);
5520 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5521 We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;

	  exp = new;
	  continue;
	}
5530 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5531 && ! ((TREE_CODE (exp) == NOP_EXPR
5532 || TREE_CODE (exp) == CONVERT_EXPR)
5533 && (TYPE_MODE (TREE_TYPE (exp))
5534 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5537 /* If any reference in the chain is volatile, the effect is volatile. */
5538 if (TREE_THIS_VOLATILE (exp))
5541 /* If the offset is non-constant already, then we can't assume any
5542 alignment more than the alignment here. */
5543 if (! TREE_CONSTANT (offset))
5544 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
      exp = TREE_OPERAND (exp, 0);
    }

  if (DECL_P (exp))
    alignment = MIN (alignment, DECL_ALIGN (exp));
5551 else if (TREE_TYPE (exp) != 0)
5552 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5554 /* If OFFSET is constant, see if we can return the whole thing as a
5555 constant bit position. Otherwise, split it up. */
5556 if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
5559 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5560 && host_integerp (tem, 0))
5561 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
  *palignment = alignment;
  return exp;
}
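/* Editor's example (hypothetical layout, for illustration only): given

       struct S { int a; unsigned b : 3; unsigned f : 5; } s;

   a reference to s.f would be decomposed by get_inner_reference as
   *pbitsize = 5, *pbitpos = 35 (32 bits of `a' plus 3 bits of `b'),
   *poffset = 0, and *pmode = VOIDmode since `f' is a bit-field.  */
#if 0
struct S { int a; unsigned b : 3; unsigned f : 5; };
#endif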
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5572 static enum memory_use_mode
5573 get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
5582 case EXPAND_MEMORY_USE_WO:
5583 return MEMORY_USE_WO;
5585 case EXPAND_MEMORY_USE_RW:
5586 return MEMORY_USE_RW;
5588 case EXPAND_MEMORY_USE_DONT:
5589 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5590 MEMORY_USE_DONT, because they are modifiers to a call of
5591 expand_expr in the ADDR_EXPR case of expand_expr. */
5592 case EXPAND_CONST_ADDRESS:
5593 case EXPAND_INITIALIZER:
5594 return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
5601 /* Given an rtx VALUE that may contain additions and multiplications, return
5602 an equivalent value that just refers to a register, memory, or constant.
5603 This is done by generating instructions to perform the arithmetic and
5604 returning a pseudo-register containing the value.
5606 The returned value may be a REG, SUBREG, MEM or constant. */
rtx
force_operand (value, target)
     rtx value, target;
{
  optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  rtx op2;
5617 /* Use subtarget as the target for operand 0 of a binary operation. */
5618 rtx subtarget = get_subtarget (target);
5620 /* Check for a PIC address load. */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5623 && XEXP (value, 0) == pic_offset_table_rtx
5624 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5625 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5626 || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }
5634 if (GET_CODE (value) == PLUS)
5635 binoptab = add_optab;
5636 else if (GET_CODE (value) == MINUS)
5637 binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 1);
    }
  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}
5662 /* Check for an addition with OP2 a constant integer and our first
5663 operand a PLUS of a virtual register and something else. In that
5664 case, we want to emit the sum of the virtual register and the
5665 constant first and then add the other value. This allows virtual
5666 register instantiation to simply modify the constant rather than
5667 creating another one around this addition. */
5668 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5669 && GET_CODE (XEXP (value, 0)) == PLUS
5670 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5671 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5672 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5674 rtx temp = expand_binop (GET_MODE (value), binoptab,
5675 XEXP (XEXP (value, 0), 0), op2,
5676 subtarget, 0, OPTAB_LIB_WIDEN);
5677 return expand_binop (GET_MODE (value), binoptab, temp,
5678 force_operand (XEXP (XEXP (value, 0), 1), 0),
5679 target, 0, OPTAB_LIB_WIDEN);
5682 tmp = force_operand (XEXP (value, 0), subtarget);
5683 return expand_binop (GET_MODE (value), binoptab, tmp,
5684 force_operand (op2, NULL_RTX),
5685 target, 0, OPTAB_LIB_WIDEN);
5686 /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }

  return value;
}
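/* Editor's sketch of what force_operand does with a typical address
   computation: given VALUE = (plus (mult (reg 60) (const_int 4))
   (reg 61)), it emits a multiply and an add and returns a pseudo
   holding the sum, roughly

       t1 = expand_mult (mode, reg60, GEN_INT (4), subtarget, 1);
       t2 = expand_binop (mode, add_optab, t1, reg61, target, 0,
			  OPTAB_LIB_WIDEN);

   (schematic only; the exact recursion is in the code above).  */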
5692 /* Subroutine of expand_expr: return nonzero iff there is no way that
5693 EXP can reference X, which is being modified. TOP_P is nonzero if this
5694 call is going to be used to determine whether we need a temporary
5695 for EXP, as opposed to a recursive call to this function.
5697 It is always safe for this routine to return zero since it merely
5698 searches for optimization opportunities. */
static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;
5711 /* If EXP has varying size, we MUST use a target since we currently
5712 have no way of allocating temporaries of variable size
5713 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5714 So we assume here that something at a higher level has prevented a
5715 clash. This is somewhat bogus, but the best we can do. Only
5716 do this when X is BLKmode and when we are at the top level. */
5717 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5718 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5719 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5720 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5721 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5723 && GET_MODE (x) == BLKmode)
5724 /* If X is in the outgoing argument area, it is always safe. */
5725 || (GET_CODE (x) == MEM
5726 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5727 || (GET_CODE (XEXP (x, 0)) == PLUS
5728 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5731 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5732 find the underlying pseudo. */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
5740 /* A SAVE_EXPR might appear many times in the expression passed to the
5741 top-level safe_from_p call, and if it has a complex subexpression,
5742 examining it multiple times could result in a combinatorial explosion.
5743 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5744 with optimization took about 28 minutes to compile -- even though it was
5745 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5746 and turn that off when we are done. We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }
5764 /* Now look at our tree code and possibly recurse. */
5765 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
      break;

    case 'c':
      return 1;

    case 'x':
5775 if (TREE_CODE (exp) == TREE_LIST)
5776 return ((TREE_VALUE (exp) == 0
5777 || safe_from_p (x, TREE_VALUE (exp), 0))
5778 && (TREE_CHAIN (exp) == 0
5779 || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5790 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5791 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5795 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5796 the expression. If it is set, we conflict iff we are that rtx or
5797 both are in memory. Otherwise, we check all operands of the
5798 expression recursively. */
5800 switch (TREE_CODE (exp))
5803 /* If the operand is static or we are static, we can't conflict.
5804 Likewise if we don't conflict with the operand at all. */
5805 if (staticp (TREE_OPERAND (exp, 0))
5806 || TREE_STATIC (exp)
5807 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
5813 exp = TREE_OPERAND (exp, 0);
5816 if (!DECL_RTL_SET_P (exp)
5817 || GET_CODE (DECL_RTL (exp)) != MEM)
5820 exp_rtl = XEXP (DECL_RTL (exp), 0);
5825 if (GET_CODE (x) == MEM
5826 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5827 get_alias_set (exp)))
5832 /* Assume that the call will clobber all hard registers and
5834 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5835 || GET_CODE (x) == MEM)
5840 /* If a sequence exists, we would have to scan every instruction
5841 in the sequence to see if it was safe. This is probably not
5843 if (RTL_EXPR_SEQUENCE (exp))
5846 exp_rtl = RTL_EXPR_RTL (exp);
5849 case WITH_CLEANUP_EXPR:
5850 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5853 case CLEANUP_POINT_EXPR:
5854 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5857 exp_rtl = SAVE_EXPR_RTL (exp);
5861 /* If we've already scanned this, don't do it again. Otherwise,
5862 show we've scanned it and record for clearing the flag if we're
5864 if (TREE_PRIVATE (exp))
5867 TREE_PRIVATE (exp) = 1;
5868 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5870 TREE_PRIVATE (exp) = 0;
5874 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5878 /* The only operand we look at is operand 1. The rest aren't
5879 part of the expression. */
5880 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5882 case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
5895 for (i = 0; i < nops; i++)
5896 if (TREE_OPERAND (exp, i) != 0
5897 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5900 /* If this is a language-specific tree code, it may require
5901 special handling. */
5902 if ((unsigned int) TREE_CODE (exp)
5903 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5905 && !(*lang_safe_from_p) (x, exp))
5909 /* If we have an rtl, find any enclosed object. Then see if we conflict
5913 if (GET_CODE (exp_rtl) == SUBREG)
5915 exp_rtl = SUBREG_REG (exp_rtl);
5916 if (GET_CODE (exp_rtl) == REG
5917 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5921 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5922 are memory and they conflict. */
5923 return ! (rtx_equal_p (x, exp_rtl)
5924 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5925 && true_dependence (exp_rtl, GET_MODE (x), x,
5926 rtx_addr_varies_p)));
  /* If we reach here, it is safe.  */
  return 1;
}
5933 /* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
5951 #ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
5957 enum tree_code code;
5958 enum machine_mode mode;
5960 /* Strip any NOPs that don't change the mode. */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);
5964 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5965 if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;
5969 /* First check the type of the overall operation. We need only look at
5970 unary, binary and relational operations. */
5971 if (TREE_CODE_CLASS (code) == '1'
5972 || TREE_CODE_CLASS (code) == '2'
5973 || TREE_CODE_CLASS (code) == '<')
5975 mode = TYPE_MODE (TREE_TYPE (exp));
5976 if (GET_MODE_CLASS (mode) == MODE_INT
5977 && mode > MAX_INTEGER_COMPUTATION_MODE)
5978 internal_error ("unsupported wide integer operation");
5981 /* Check operand of a unary op. */
5982 if (TREE_CODE_CLASS (code) == '1')
5984 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5985 if (GET_MODE_CLASS (mode) == MODE_INT
5986 && mode > MAX_INTEGER_COMPUTATION_MODE)
5987 internal_error ("unsupported wide integer operation");
5990 /* Check operands of a binary/comparison op. */
5991 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5993 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
5996 internal_error ("unsupported wide integer operation");
5998 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5999 if (GET_MODE_CLASS (mode) == MODE_INT
6000 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
6006 /* Return the highest power of two that EXP is known to be a multiple of.
6007 This is used in updating alignment of MEMs in array references. */
6009 static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* If the integer is expressible in a HOST_WIDE_INT, we can find
	 the lowest bit that's a one.  If the result is zero or negative,
	 pessimize by returning 1.  This is overly-conservative, but such
	 things should not happen in the offset expressions that we are
	 using.  */
      if (host_integerp (exp, 0))
	{
	  c0 = tree_low_cst (exp, 0);
	  return c0 >= 0 ? c0 & -c0 : 1;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MAX (1, c0 / c1);

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case COMPOUND_EXPR:  case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
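/* Editor's example: C0 & -C0 isolates the lowest set bit of a two's
   complement value, which is exactly the largest power of two dividing
   it; e.g. 24 = 0b11000 yields 8, so an offset of 24 bytes is known to
   be 8-byte aligned.  Standalone sketch:  */
#if 0
static long
lowest_set_bit (long c0)	/* assumes c0 > 0 */
{
  return c0 & -c0;		/* 24 -> 8, 40 -> 8, 7 -> 1 */
}
#endif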
6062 /* Return an object on the placeholder list that matches EXP, a
6063 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6064 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6065 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6066 is a location which initially points to a starting location in the
6067 placeholder list (zero means start of the list) and where a pointer into
6068 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;
6078 for (placeholder_expr
6079 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6080 placeholder_expr != 0;
6081 placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;
6086 /* Find the outermost reference that is of the type we want. If none,
6087 see if any object has a type that is a pointer to the type we
6089 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6090 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6091 || TREE_CODE (elt) == COND_EXPR)
6092 ? TREE_OPERAND (elt, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6097 ? TREE_OPERAND (elt, 0) : 0))
6098 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

6105 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6108 || TREE_CODE (elt) == COND_EXPR)
6109 ? TREE_OPERAND (elt, 1)
6110 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6114 ? TREE_OPERAND (elt, 0) : 0))
6115 if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6128 /* expand_expr: generate code for computing expression EXP.
6129 An rtx for the computed value is returned. The value is never null.
6130 In the case of a void EXP, const0_rtx is returned.
6132 The value may be stored in TARGET if TARGET is nonzero.
6133 TARGET is just a suggestion; callers must assume that
6134 the rtx returned may not be the same as TARGET.
6136 If TARGET is CONST0_RTX, it means that the value will be ignored.
6138 If TMODE is not VOIDmode, it suggests generating the
6139 result in mode TMODE. But this is done only when convenient.
6140 Otherwise, TMODE is ignored and the value generated in its natural mode.
6141 TMODE is just a suggestion; callers must assume that
6142 the rtx returned may not have mode TMODE.
6144 Note that TARGET may have neither TMODE nor MODE. In that case, it
6145 probably will not be used.
6147 If MODIFIER is EXPAND_SUM then when EXP is an addition
6148 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6149 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6150 products as above, or REG or MEM, or constant.
6151 Ordinarily in such cases we would output mul or add instructions
6152 and then return a pseudo reg containing the sum.
6154 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6155 it also marks a label as absolutely required (it can't be dead).
6156 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6157 This is used for outputting expressions used in initializers.
6159 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6160 with a constant address even if that address is not normally legitimate.
6161 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
6171 tree type = TREE_TYPE (exp);
6172 int unsignedp = TREE_UNSIGNED (type);
6173 enum machine_mode mode;
6174 enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
6179 /* Used by check-memory-usage to make modifier read only. */
6180 enum expand_modifier ro_modifier;
6182 /* Handle ERROR_MARK before anybody tries to access its type. */
6183 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }
6191 mode = TYPE_MODE (type);
6192 /* Use subtarget as the target for operand 0 of a binary operation. */
6193 subtarget = get_subtarget (target);
6194 original_target = target;
6195 ignore = (target == const0_rtx
6196 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6197 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6198 || code == COND_EXPR)
6199 && TREE_CODE (type) == VOID_TYPE));
6201 /* Make a read-only version of the modifier. */
6202 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6203 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6204 ro_modifier = modifier;
6206 ro_modifier = EXPAND_NORMAL;
6208 /* If we are going to ignore this result, we need only do something
6209 if there is a side-effect somewhere in the expression. If there
6210 is, short-circuit the most common cases here. Note that we must
6211 not call expand_expr with anything but const0_rtx in case this
6212 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6219 /* Ensure we reference a volatile object even if value is ignored, but
6220 don't do this if all we are doing is taking its address. */
6221 if (TREE_THIS_VOLATILE (exp)
6222 && TREE_CODE (exp) != FUNCTION_DECL
6223 && mode != VOIDmode && mode != BLKmode
6224 && modifier != EXPAND_CONST_ADDRESS)
6226 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6227 if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6232 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6233 || code == INDIRECT_REF || code == BUFFER_REF)
6234 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6235 VOIDmode, ro_modifier);
6236 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6237 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6239 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6241 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6245 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6246 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
6249 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6250 VOIDmode, ro_modifier);
6251 else if (code == BIT_FIELD_REF)
6253 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6255 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6257 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6265 #ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */
6271 && GET_MODE (target) != mode
6272 && TREE_CODE (exp) != INTEGER_CST
6273 && TREE_CODE (exp) != PARM_DECL
6274 && TREE_CODE (exp) != ARRAY_REF
6275 && TREE_CODE (exp) != ARRAY_RANGE_REF
6276 && TREE_CODE (exp) != COMPONENT_REF
6277 && TREE_CODE (exp) != BIT_FIELD_REF
6278 && TREE_CODE (exp) != INDIRECT_REF
6279 && TREE_CODE (exp) != CALL_EXPR
6280 && TREE_CODE (exp) != VAR_DECL
6281 && TREE_CODE (exp) != RTL_EXPR)
6283 enum machine_mode mode = GET_MODE (target);
6285 if (GET_MODE_CLASS (mode) == MODE_INT
6286 && mode > MAX_INTEGER_COMPUTATION_MODE)
6287 internal_error ("unsupported wide integer operation");
6290 if (tmode != mode
6291 && TREE_CODE (exp) != INTEGER_CST
6292 && TREE_CODE (exp) != PARM_DECL
6293 && TREE_CODE (exp) != ARRAY_REF
6294 && TREE_CODE (exp) != ARRAY_RANGE_REF
6295 && TREE_CODE (exp) != COMPONENT_REF
6296 && TREE_CODE (exp) != BIT_FIELD_REF
6297 && TREE_CODE (exp) != INDIRECT_REF
6298 && TREE_CODE (exp) != VAR_DECL
6299 && TREE_CODE (exp) != CALL_EXPR
6300 && TREE_CODE (exp) != RTL_EXPR
6301 && GET_MODE_CLASS (tmode) == MODE_INT
6302 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6303 internal_error ("unsupported wide integer operation");
6305 check_max_integer_computation_mode (exp);
6308 /* If we will do cse, generate all results into pseudo registers
6309 since 1) that allows cse to find more things
6310 and 2) otherwise cse could produce an insn the machine
6311 cannot support.  */
6313 if (! cse_not_expected && mode != BLKmode && target
6314 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6321 tree function = decl_function_context (exp);
6322 /* Handle using a label in a containing function. */
6323 if (function != current_function_decl
6324 && function != inline_function_decl && function != 0)
6326 struct function *p = find_function_data (function);
6327 p->expr->x_forced_labels
6328 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6329 p->expr->x_forced_labels);
6333 if (modifier == EXPAND_INITIALIZER)
6334 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6335 label_rtx (exp),
6336 forced_labels);
6339 temp = gen_rtx_MEM (FUNCTION_MODE,
6340 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6341 if (function != current_function_decl
6342 && function != inline_function_decl && function != 0)
6343 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6344 return temp;
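/* Sketch (hypothetical, GNU C computed-goto extension): for

       void *p = &&lab;

   the MEM built above wraps a LABEL_REF for "lab", so taking the
   label's address works; the LABEL_REF is flagged non-local when
   "lab" belongs to a containing function.  */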
6348 if (DECL_RTL (exp) == 0)
6350 error_with_decl (exp, "prior parameter's size depends on `%s'");
6351 return CONST0_RTX (mode);
6354 /* ... fall through ... */
6357 /* If a static var's type was incomplete when the decl was written,
6358 but the type is complete now, lay out the decl now. */
6359 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6360 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6362 layout_decl (exp, 0);
6363 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6366 /* Although static-storage variables start off initialized, according to
6367 ANSI C, a memcpy could overwrite them with uninitialized values. So
6368 we check them too. This also lets us check for read-only variables
6369 accessed via a non-const declaration, in case it won't be detected
6370 any other way (e.g., in an embedded system or OS kernel without
6371 memory protection).
6373 Aggregates are not checked here; they're handled elsewhere.  */
6374 if (cfun && current_function_check_memory_usage
6376 && GET_CODE (DECL_RTL (exp)) == MEM
6377 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6379 enum memory_use_mode memory_usage;
6380 memory_usage = get_memory_usage_from_modifier (modifier);
6382 in_check_memory_usage = 1;
6383 if (memory_usage != MEMORY_USE_DONT)
6384 emit_library_call (chkr_check_addr_libfunc,
6385 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6386 XEXP (DECL_RTL (exp), 0), Pmode,
6387 GEN_INT (int_size_in_bytes (type)),
6388 TYPE_MODE (sizetype),
6389 GEN_INT (memory_usage),
6390 TYPE_MODE (integer_type_node));
6391 in_check_memory_usage = 0;
6394 /* ... fall through ... */
6398 if (DECL_RTL (exp) == 0)
6401 /* Ensure the variable is marked as used even if it doesn't go through
6402 a parser.  If it hasn't been used yet, write out an external
6403 definition.  */
6404 if (! TREE_USED (exp))
6406 assemble_external (exp);
6407 TREE_USED (exp) = 1;
6410 /* Show we haven't gotten RTL for this yet.  */
6411 temp = 0;
6413 /* Handle variables inherited from containing functions. */
6414 context = decl_function_context (exp);
6416 /* We treat inline_function_decl as an alias for the current function
6417 because that is the inline function whose vars, types, etc.
6418 are being merged into the current function.
6419 See expand_inline_function. */
6421 if (context != 0 && context != current_function_decl
6422 && context != inline_function_decl
6423 /* If var is static, we don't need a static chain to access it. */
6424 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6425 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6429 /* Mark as non-local and addressable. */
6430 DECL_NONLOCAL (exp) = 1;
6431 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6433 mark_addressable (exp);
6434 if (GET_CODE (DECL_RTL (exp)) != MEM)
6436 addr = XEXP (DECL_RTL (exp), 0);
6437 if (GET_CODE (addr) == MEM)
6439 = replace_equiv_address (addr,
6440 fix_lexical_addr (XEXP (addr, 0), exp));
6442 addr = fix_lexical_addr (addr, exp);
6444 temp = replace_equiv_address (DECL_RTL (exp), addr);
6447 /* This is the case of an array whose size is to be determined
6448 from its initializer, while the initializer is still being parsed.
6449 See expand_decl.  */
6451 else if (GET_CODE (DECL_RTL (exp)) == MEM
6452 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6453 temp = validize_mem (DECL_RTL (exp));
6455 /* If DECL_RTL is memory, we are in the normal case and either
6456 the address is not valid or it is not a register and -fforce-addr
6457 is specified, get the address into a register. */
6459 else if (GET_CODE (DECL_RTL (exp)) == MEM
6460 && modifier != EXPAND_CONST_ADDRESS
6461 && modifier != EXPAND_SUM
6462 && modifier != EXPAND_INITIALIZER
6463 && (! memory_address_p (DECL_MODE (exp),
6464 XEXP (DECL_RTL (exp), 0))
6466 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6467 temp = replace_equiv_address (DECL_RTL (exp),
6468 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6470 /* If we got something, return it. But first, set the alignment
6471 if the address is a register. */
6474 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6475 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6480 /* If the mode of DECL_RTL does not match that of the decl, it
6481 must be a promoted value. We return a SUBREG of the wanted mode,
6482 but mark it so that we know that it was already extended. */
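/* Worked example (assuming a target whose PROMOTE_MODE widens HImode
   to SImode): a "short" variable then lives in an SImode pseudo, so
   DECL_RTL is (reg:SI N) while MODE is HImode.  We return something
   like (subreg:HI (reg:SI N) 0) -- the offset depends on endianness --
   with SUBREG_PROMOTED_VAR_P set, recording that the upper bits
   already hold a valid extension.  */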
6484 if (GET_CODE (DECL_RTL (exp)) == REG
6485 && GET_MODE (DECL_RTL (exp)) != mode)
6487 /* Get the signedness used for this variable. Ensure we get the
6488 same mode we got when the variable was declared. */
6489 if (GET_MODE (DECL_RTL (exp))
6490 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6493 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6494 SUBREG_PROMOTED_VAR_P (temp) = 1;
6495 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6499 return DECL_RTL (exp);
6502 return immed_double_const (TREE_INT_CST_LOW (exp),
6503 TREE_INT_CST_HIGH (exp), mode);
6506 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6507 EXPAND_MEMORY_USE_BAD);
6510 /* If optimized, generate immediate CONST_DOUBLE
6511 which will be turned into memory by reload if necessary.
6513 We used to force a register so that loop.c could see it. But
6514 this does not allow gen_* patterns to perform optimizations with
6515 the constants. It also produces two insns in cases like "x = 1.0;".
6516 On most machines, floating-point constants are not permitted in
6517 many insns, so we'd end up copying it to a register in any case.
6519 Now, we do the copying in expand_binop, if appropriate. */
6520 return immed_real_const (exp);
6524 if (! TREE_CST_RTL (exp))
6525 output_constant_def (exp, 1);
6527 /* TREE_CST_RTL probably contains a constant address.
6528 On RISC machines where a constant address isn't valid,
6529 make some insns to get that address into a register. */
6530 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6531 && modifier != EXPAND_CONST_ADDRESS
6532 && modifier != EXPAND_INITIALIZER
6533 && modifier != EXPAND_SUM
6534 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6536 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6537 return replace_equiv_address (TREE_CST_RTL (exp),
6538 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6539 return TREE_CST_RTL (exp);
6541 case EXPR_WITH_FILE_LOCATION:
6544 const char *saved_input_filename = input_filename;
6545 int saved_lineno = lineno;
6546 input_filename = EXPR_WFL_FILENAME (exp);
6547 lineno = EXPR_WFL_LINENO (exp);
6548 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6549 emit_line_note (input_filename, lineno);
6550 /* Possibly avoid switching back and forth here. */
6551 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6552 input_filename = saved_input_filename;
6553 lineno = saved_lineno;
6558 context = decl_function_context (exp);
6560 /* If this SAVE_EXPR was at global context, assume we are an
6561 initialization function and move it into our context. */
6563 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6565 /* We treat inline_function_decl as an alias for the current function
6566 because that is the inline function whose vars, types, etc.
6567 are being merged into the current function.
6568 See expand_inline_function. */
6569 if (context == current_function_decl || context == inline_function_decl)
6572 /* If this is non-local, handle it. */
6575 /* The following call just exists to abort if the context is
6576 not of a containing function. */
6577 find_function_data (context);
6579 temp = SAVE_EXPR_RTL (exp);
6580 if (temp && GET_CODE (temp) == REG)
6582 put_var_into_stack (exp);
6583 temp = SAVE_EXPR_RTL (exp);
6585 if (temp == 0 || GET_CODE (temp) != MEM)
6588 replace_equiv_address (temp,
6589 fix_lexical_addr (XEXP (temp, 0), exp));
6591 if (SAVE_EXPR_RTL (exp) == 0)
6593 if (mode == VOIDmode)
6596 temp = assign_temp (build_qualified_type (type,
6598 | TYPE_QUAL_CONST)),
6601 SAVE_EXPR_RTL (exp) = temp;
6602 if (!optimize && GET_CODE (temp) == REG)
6603 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6606 /* If the mode of TEMP does not match that of the expression, it
6607 must be a promoted value. We pass store_expr a SUBREG of the
6608 wanted mode but mark it so that we know that it was already
6609 extended.  Note that `unsignedp' was modified above in
6610 this case.  */
6612 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6614 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6615 SUBREG_PROMOTED_VAR_P (temp) = 1;
6616 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6619 if (temp == const0_rtx)
6620 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6621 EXPAND_MEMORY_USE_BAD);
6623 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6625 TREE_USED (exp) = 1;
6628 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6629 must be a promoted value. We return a SUBREG of the wanted mode,
6630 but mark it so that we know that it was already extended. */
6632 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6633 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6635 /* Compute the signedness and make the proper SUBREG. */
6636 promote_mode (type, mode, &unsignedp, 0);
6637 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6638 SUBREG_PROMOTED_VAR_P (temp) = 1;
6639 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6643 return SAVE_EXPR_RTL (exp);
6648 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6649 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6653 case PLACEHOLDER_EXPR:
6655 tree old_list = placeholder_list;
6656 tree placeholder_expr = 0;
6658 exp = find_placeholder (exp, &placeholder_expr);
6662 placeholder_list = TREE_CHAIN (placeholder_expr);
6663 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6664 placeholder_list = old_list;
6668 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6671 case WITH_RECORD_EXPR:
6672 /* Put the object on the placeholder list, expand our first operand,
6673 and pop the list. */
6674 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6676 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6677 tmode, ro_modifier);
6678 placeholder_list = TREE_CHAIN (placeholder_list);
6682 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6683 expand_goto (TREE_OPERAND (exp, 0));
6685 expand_computed_goto (TREE_OPERAND (exp, 0));
6689 expand_exit_loop_if_false (NULL,
6690 invert_truthvalue (TREE_OPERAND (exp, 0)));
6693 case LABELED_BLOCK_EXPR:
6694 if (LABELED_BLOCK_BODY (exp))
6695 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6696 /* Should perhaps use expand_label, but this is simpler and safer. */
6697 do_pending_stack_adjust ();
6698 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6701 case EXIT_BLOCK_EXPR:
6702 if (EXIT_BLOCK_RETURN (exp))
6703 sorry ("returned value in block_exit_expr");
6704 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6709 expand_start_loop (1);
6710 expand_expr_stmt (TREE_OPERAND (exp, 0));
6718 tree vars = TREE_OPERAND (exp, 0);
6719 int vars_need_expansion = 0;
6721 /* Need to open a binding contour here because
6722 if there are any cleanups they must be contained here. */
6723 expand_start_bindings (2);
6725 /* Mark the corresponding BLOCK for output in its proper place. */
6726 if (TREE_OPERAND (exp, 2) != 0
6727 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6728 insert_block (TREE_OPERAND (exp, 2));
6730 /* If VARS have not yet been expanded, expand them now. */
6733 if (!DECL_RTL_SET_P (vars))
6735 vars_need_expansion = 1;
6738 expand_decl_init (vars);
6739 vars = TREE_CHAIN (vars);
6742 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6744 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6750 if (RTL_EXPR_SEQUENCE (exp))
6752 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6754 emit_insns (RTL_EXPR_SEQUENCE (exp));
6755 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6757 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6758 free_temps_for_rtl_expr (exp);
6759 return RTL_EXPR_RTL (exp);
6762 /* If we don't need the result, just ensure we evaluate any
6763 subexpressions.  */
6767 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6768 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6769 EXPAND_MEMORY_USE_BAD);
6773 /* All elts simple constants => refer to a constant in memory. But
6774 if this is a non-BLKmode mode, let it store a field at a time
6775 since that should make a CONST_INT or CONST_DOUBLE when we
6776 fold. Likewise, if we have a target we can use, it is best to
6777 store directly into the target unless the type is large enough
6778 that memcpy will be used. If we are making an initializer and
6779 all operands are constant, put it in memory as well. */
6780 else if ((TREE_STATIC (exp)
6781 && ((mode == BLKmode
6782 && ! (target != 0 && safe_from_p (target, exp, 1)))
6783 || TREE_ADDRESSABLE (exp)
6784 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6785 && (! MOVE_BY_PIECES_P
6786 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6788 && ! mostly_zeros_p (exp))))
6789 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6791 rtx constructor = output_constant_def (exp, 1);
6793 if (modifier != EXPAND_CONST_ADDRESS
6794 && modifier != EXPAND_INITIALIZER
6795 && modifier != EXPAND_SUM)
6796 constructor = validize_mem (constructor);
6802 /* Handle calls that pass values in multiple non-contiguous
6803 locations. The Irix 6 ABI has examples of this. */
6804 if (target == 0 || ! safe_from_p (target, exp, 1)
6805 || GET_CODE (target) == PARALLEL)
6807 = assign_temp (build_qualified_type (type,
6809 | (TREE_READONLY (exp)
6810 * TYPE_QUAL_CONST))),
6811 TREE_ADDRESSABLE (exp), 1, 1);
6813 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6814 int_size_in_bytes (TREE_TYPE (exp)));
6820 tree exp1 = TREE_OPERAND (exp, 0);
6822 tree string = string_constant (exp1, &index);
6824 /* Try to optimize reads from const strings. */
6826 && TREE_CODE (string) == STRING_CST
6827 && TREE_CODE (index) == INTEGER_CST
6828 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6829 && GET_MODE_CLASS (mode) == MODE_INT
6830 && GET_MODE_SIZE (mode) == 1
6831 && modifier != EXPAND_MEMORY_USE_WO)
6833 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6835 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6836 op0 = memory_address (mode, op0);
6838 if (cfun && current_function_check_memory_usage
6839 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6841 enum memory_use_mode memory_usage;
6842 memory_usage = get_memory_usage_from_modifier (modifier);
6844 if (memory_usage != MEMORY_USE_DONT)
6846 in_check_memory_usage = 1;
6847 emit_library_call (chkr_check_addr_libfunc,
6848 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6849 Pmode, GEN_INT (int_size_in_bytes (type)),
6850 TYPE_MODE (sizetype),
6851 GEN_INT (memory_usage),
6852 TYPE_MODE (integer_type_node));
6853 in_check_memory_usage = 0;
6857 temp = gen_rtx_MEM (mode, op0);
6858 set_mem_attributes (temp, exp, 0);
6860 /* If we are writing to this object and its type is a record with
6861 readonly fields, we must mark it as readonly so it will
6862 conflict with readonly references to those fields. */
6863 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6864 RTX_UNCHANGING_P (temp) = 1;
6870 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6874 tree array = TREE_OPERAND (exp, 0);
6875 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6876 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6877 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6880 /* Optimize the special-case of a zero lower bound.
6882 We convert the low_bound to sizetype to avoid some problems
6883 with constant folding. (E.g. suppose the lower bound is 1,
6884 and its mode is QI. Without the conversion, (ARRAY
6885 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6886 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6888 if (! integer_zerop (low_bound))
6889 index = size_diffop (index, convert (sizetype, low_bound));
6891 /* Fold an expression like: "foo"[2].
6892 This is not done in fold so it won't happen inside &.
6893 Don't fold if this is for wide characters since it's too
6894 difficult to do correctly and this is a very rare case. */
6896 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6897 && TREE_CODE (array) == STRING_CST
6898 && TREE_CODE (index) == INTEGER_CST
6899 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6900 && GET_MODE_CLASS (mode) == MODE_INT
6901 && GET_MODE_SIZE (mode) == 1)
6903 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
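/* E.g. "foo"[2] folds here to GEN_INT ('o'), i.e. (const_int 111).  */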
6905 /* If this is a constant index into a constant array,
6906 just get the value from the array. Handle both the cases when
6907 we have an explicit constructor and when our operand is a variable
6908 that was declared const. */
6910 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6911 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6912 && TREE_CODE (index) == INTEGER_CST
6913 && 0 > compare_tree_int (index,
6914 list_length (CONSTRUCTOR_ELTS
6915 (TREE_OPERAND (exp, 0)))))
6919 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6920 i = TREE_INT_CST_LOW (index);
6921 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6925 return expand_expr (fold (TREE_VALUE (elem)), target,
6926 tmode, ro_modifier);
6929 else if (optimize >= 1
6930 && modifier != EXPAND_CONST_ADDRESS
6931 && modifier != EXPAND_INITIALIZER
6932 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6933 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6934 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6936 if (TREE_CODE (index) == INTEGER_CST)
6938 tree init = DECL_INITIAL (array);
6940 if (TREE_CODE (init) == CONSTRUCTOR)
6944 for (elem = CONSTRUCTOR_ELTS (init);
6946 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6947 elem = TREE_CHAIN (elem))
6950 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6951 return expand_expr (fold (TREE_VALUE (elem)), target,
6952 tmode, ro_modifier);
6954 else if (TREE_CODE (init) == STRING_CST
6955 && 0 > compare_tree_int (index,
6956 TREE_STRING_LENGTH (init)))
6958 tree type = TREE_TYPE (TREE_TYPE (init));
6959 enum machine_mode mode = TYPE_MODE (type);
6961 if (GET_MODE_CLASS (mode) == MODE_INT
6962 && GET_MODE_SIZE (mode) == 1)
6964 (TREE_STRING_POINTER
6965 (init)[TREE_INT_CST_LOW (index)]));
6974 case ARRAY_RANGE_REF:
6975 /* If the operand is a CONSTRUCTOR, we can just extract the
6976 appropriate field if it is present. Don't do this if we have
6977 already written the data since we want to refer to that copy
6978 and varasm.c assumes that's what we'll do. */
6979 if (code == COMPONENT_REF
6980 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6981 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6985 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6986 elt = TREE_CHAIN (elt))
6987 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6988 /* We can normally use the value of the field in the
6989 CONSTRUCTOR. However, if this is a bitfield in
6990 an integral mode that we can fit in a HOST_WIDE_INT,
6991 we must mask only the number of bits in the bitfield,
6992 since this is done implicitly by the constructor. If
6993 the bitfield does not meet either of those conditions,
6994 we can't do this optimization. */
6995 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6996 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6998 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6999 <= HOST_BITS_PER_WIDE_INT))))
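/* Worked example (hypothetical): for an unsigned 3-bit bitfield whose
   CONSTRUCTOR value is 13, the code below computes
   13 & ((1 << 3) - 1) == 5, reproducing the truncation that storing
   into the field would have performed.  */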
7001 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7002 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7004 HOST_WIDE_INT bitsize
7005 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7007 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7009 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7010 op0 = expand_and (op0, op1, target);
7014 enum machine_mode imode
7015 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7017 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7020 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7022 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7032 enum machine_mode mode1;
7033 HOST_WIDE_INT bitsize, bitpos;
7036 unsigned int alignment;
7037 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7038 &mode1, &unsignedp, &volatilep,
7042 /* If we got back the original object, something is wrong. Perhaps
7043 we are evaluating an expression too early. In any event, don't
7044 infinitely recurse. */
7048 /* If TEM's type is a union of variable size, pass TARGET to the inner
7049 computation, since it will need a temporary and TARGET is known
7050 to be safe to use.  This occurs in unchecked conversion in Ada.  */
7054 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7055 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7057 ? target : NULL_RTX),
7059 (modifier == EXPAND_INITIALIZER
7060 || modifier == EXPAND_CONST_ADDRESS)
7061 ? modifier : EXPAND_NORMAL);
7063 /* If this is a constant, put it into a register if it is a
7064 legitimate constant and OFFSET is 0 and memory if it isn't. */
7065 if (CONSTANT_P (op0))
7067 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7068 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7070 op0 = force_reg (mode, op0);
7072 op0 = validize_mem (force_const_mem (mode, op0));
7077 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7079 /* If this object is in a register, put it into memory.
7080 This case can't occur in C, but can in Ada if we have
7081 unchecked conversion of an expression from a scalar type to
7082 an array or record type. */
7083 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7084 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7086 /* If the operand is a SAVE_EXPR, we can deal with this by
7087 forcing the SAVE_EXPR into memory. */
7088 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7090 put_var_into_stack (TREE_OPERAND (exp, 0));
7091 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7096 = build_qualified_type (TREE_TYPE (tem),
7097 (TYPE_QUALS (TREE_TYPE (tem))
7098 | TYPE_QUAL_CONST));
7099 rtx memloc = assign_temp (nt, 1, 1, 1);
7101 mark_temp_addr_taken (memloc);
7102 emit_move_insn (memloc, op0);
7107 if (GET_CODE (op0) != MEM)
7110 if (GET_MODE (offset_rtx) != ptr_mode)
7111 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7113 #ifdef POINTERS_EXTEND_UNSIGNED
7114 if (GET_MODE (offset_rtx) != Pmode)
7115 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7118 /* A constant address in OP0 can have VOIDmode; we must not try
7119 to call force_reg in that case, so avoid it.  */
7120 if (GET_CODE (op0) == MEM
7121 && GET_MODE (op0) == BLKmode
7122 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7124 && (bitpos % bitsize) == 0
7125 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7126 && alignment == GET_MODE_ALIGNMENT (mode1))
7128 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7130 if (GET_CODE (XEXP (temp, 0)) == REG)
7133 op0 = (replace_equiv_address
7135 force_reg (GET_MODE (XEXP (temp, 0)),
7140 op0 = offset_address (op0, offset_rtx,
7141 highest_pow2_factor (offset));
7144 /* Don't forget about volatility even if this is a bitfield. */
7145 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7147 if (op0 == orig_op0)
7148 op0 = copy_rtx (op0);
7150 MEM_VOLATILE_P (op0) = 1;
7153 /* Check the access. */
7154 if (cfun != 0 && current_function_check_memory_usage
7155 && GET_CODE (op0) == MEM)
7157 enum memory_use_mode memory_usage;
7158 memory_usage = get_memory_usage_from_modifier (modifier);
7160 if (memory_usage != MEMORY_USE_DONT)
7165 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7166 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7168 /* Check the access right of the pointer. */
7169 in_check_memory_usage = 1;
7170 if (size > BITS_PER_UNIT)
7171 emit_library_call (chkr_check_addr_libfunc,
7172 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7173 Pmode, GEN_INT (size / BITS_PER_UNIT),
7174 TYPE_MODE (sizetype),
7175 GEN_INT (memory_usage),
7176 TYPE_MODE (integer_type_node));
7177 in_check_memory_usage = 0;
7181 /* In cases where an aligned union has an unaligned object
7182 as a field, we might be extracting a BLKmode value from
7183 an integer-mode (e.g., SImode) object. Handle this case
7184 by doing the extract into an object as wide as the field
7185 (which we know to be the width of a basic mode), then
7186 storing into memory, and changing the mode to BLKmode. */
7187 if (mode1 == VOIDmode
7188 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7189 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7191 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7192 && modifier != EXPAND_CONST_ADDRESS
7193 && modifier != EXPAND_INITIALIZER)
7194 /* If the field isn't aligned enough to fetch as a memref,
7195 fetch it as a bit field. */
7196 || (mode1 != BLKmode
7197 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7198 && ((TYPE_ALIGN (TREE_TYPE (tem))
7199 < GET_MODE_ALIGNMENT (mode))
7200 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7201 /* If the type and the field are a constant size and the
7202 size of the type isn't the same size as the bitfield,
7203 we must use bitfield operations. */
7205 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7210 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7211 && (TYPE_ALIGN (type) > alignment
7212 || bitpos % TYPE_ALIGN (type) != 0)))
7214 enum machine_mode ext_mode = mode;
7216 if (ext_mode == BLKmode
7217 && ! (target != 0 && GET_CODE (op0) == MEM
7218 && GET_CODE (target) == MEM
7219 && bitpos % BITS_PER_UNIT == 0))
7220 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7222 if (ext_mode == BLKmode)
7224 /* In this case, BITPOS must start at a byte boundary and
7225 TARGET, if specified, must be a MEM. */
7226 if (GET_CODE (op0) != MEM
7227 || (target != 0 && GET_CODE (target) != MEM)
7228 || bitpos % BITS_PER_UNIT != 0)
7231 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7233 target = assign_temp (type, 0, 1, 1);
7235 emit_block_move (target, op0,
7236 bitsize == -1 ? expr_size (exp)
7237 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7243 op0 = validize_mem (op0);
7245 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7246 mark_reg_pointer (XEXP (op0, 0), alignment);
7248 op0 = extract_bit_field (op0, bitsize, bitpos,
7249 unsignedp, target, ext_mode, ext_mode,
7251 int_size_in_bytes (TREE_TYPE (tem)));
7253 /* If the result is a record type and BITSIZE is narrower than
7254 the mode of OP0, an integral mode, and this is a big endian
7255 machine, we must put the field into the high-order bits. */
7256 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7257 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7258 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7259 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7260 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7264 if (mode == BLKmode)
7266 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7268 rtx new = assign_temp (nt, 0, 1, 1);
7270 emit_move_insn (new, op0);
7271 op0 = copy_rtx (new);
7272 PUT_MODE (op0, BLKmode);
7278 /* If the result is BLKmode, use that to access the object
7279 now as well.  */
7280 if (mode == BLKmode)
7281 mode1 = BLKmode;
7283 /* Get a reference to just this component. */
7284 if (modifier == EXPAND_CONST_ADDRESS
7285 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7286 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7288 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7290 if (op0 == orig_op0)
7291 op0 = copy_rtx (op0);
7293 set_mem_attributes (op0, exp, 0);
7294 if (GET_CODE (XEXP (op0, 0)) == REG)
7295 mark_reg_pointer (XEXP (op0, 0), alignment);
7297 MEM_VOLATILE_P (op0) |= volatilep;
7298 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7299 || modifier == EXPAND_CONST_ADDRESS
7300 || modifier == EXPAND_INITIALIZER)
7302 else if (target == 0)
7303 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7305 convert_move (target, op0, unsignedp);
7311 rtx insn, before = get_last_insn (), vtbl_ref;
7313 /* Evaluate the interior expression. */
7314 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7317 /* Get or create an instruction off which to hang a note. */
7318 if (REG_P (subtarget))
7321 insn = get_last_insn ();
7324 if (! INSN_P (insn))
7325 insn = prev_nonnote_insn (insn);
7329 target = gen_reg_rtx (GET_MODE (subtarget));
7330 insn = emit_move_insn (target, subtarget);
7333 /* Collect the data for the note. */
7334 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7335 vtbl_ref = plus_constant (vtbl_ref,
7336 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7337 /* Discard the initial CONST that was added. */
7338 vtbl_ref = XEXP (vtbl_ref, 0);
7341 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7346 /* Intended for a reference to a buffer of a file-object in Pascal.
7347 But it's not certain that a special tree code will really be
7348 necessary for these. INDIRECT_REF might work for them. */
7354 /* Pascal set IN expression.
7357 rlo = set_low - (set_low % bits_per_word);
7358 the_word = set[(index - rlo) / bits_per_word];
7359 bit_index = index % bits_per_word;
7360 bitmask = 1 << bit_index;
7361 return !!(the_word & bitmask); */
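/* Worked example (hypothetical, following the pseudocode above with
   bits_per_word == 8): for set_low == 10 and index == 19,
   rlo == 10 - 10 % 8 == 8, the_word == set[(19 - 8) / 8] == set[1],
   bit_index == 19 % 8 == 3, and bitmask == 1 << 3 == 8.  */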
7363 tree set = TREE_OPERAND (exp, 0);
7364 tree index = TREE_OPERAND (exp, 1);
7365 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7366 tree set_type = TREE_TYPE (set);
7367 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7368 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7369 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7370 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7371 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7372 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7373 rtx setaddr = XEXP (setval, 0);
7374 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7376 rtx diff, quo, rem, addr, bit, result;
7378 /* If domain is empty, answer is no. Likewise if index is constant
7379 and out of bounds. */
7380 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7381 && TREE_CODE (set_low_bound) == INTEGER_CST
7382 && tree_int_cst_lt (set_high_bound, set_low_bound))
7383 || (TREE_CODE (index) == INTEGER_CST
7384 && TREE_CODE (set_low_bound) == INTEGER_CST
7385 && tree_int_cst_lt (index, set_low_bound))
7386 || (TREE_CODE (set_high_bound) == INTEGER_CST
7387 && TREE_CODE (index) == INTEGER_CST
7388 && tree_int_cst_lt (set_high_bound, index))))
7392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7394 /* If we get here, we have to generate the code for both cases
7395 (in range and out of range). */
7397 op0 = gen_label_rtx ();
7398 op1 = gen_label_rtx ();
7400 if (! (GET_CODE (index_val) == CONST_INT
7401 && GET_CODE (lo_r) == CONST_INT))
7403 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7404 GET_MODE (index_val), iunsignedp, 0, op1);
7407 if (! (GET_CODE (index_val) == CONST_INT
7408 && GET_CODE (hi_r) == CONST_INT))
7410 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7411 GET_MODE (index_val), iunsignedp, 0, op1);
7414 /* Calculate the element number of bit zero in the first word
7415 of the set.  */
7416 if (GET_CODE (lo_r) == CONST_INT)
7417 rlow = GEN_INT (INTVAL (lo_r)
7418 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7420 rlow = expand_binop (index_mode, and_optab, lo_r,
7421 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7422 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7424 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7425 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7427 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7428 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7429 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7430 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7432 addr = memory_address (byte_mode,
7433 expand_binop (index_mode, add_optab, diff,
7434 setaddr, NULL_RTX, iunsignedp,
7437 /* Extract the bit we want to examine. */
7438 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7439 gen_rtx_MEM (byte_mode, addr),
7440 make_tree (TREE_TYPE (index), rem),
7442 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7443 GET_MODE (target) == byte_mode ? target : 0,
7444 1, OPTAB_LIB_WIDEN);
7446 if (result != target)
7447 convert_move (target, result, 1);
7449 /* Output the code to handle the out-of-range case. */
7452 emit_move_insn (target, const0_rtx);
7457 case WITH_CLEANUP_EXPR:
7458 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7460 WITH_CLEANUP_EXPR_RTL (exp)
7461 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7462 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7464 /* That's it for this cleanup. */
7465 TREE_OPERAND (exp, 1) = 0;
7467 return WITH_CLEANUP_EXPR_RTL (exp);
7469 case CLEANUP_POINT_EXPR:
7471 /* Start a new binding layer that will keep track of all cleanup
7472 actions to be performed. */
7473 expand_start_bindings (2);
7475 target_temp_slot_level = temp_slot_level;
7477 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7478 /* If we're going to use this value, load it up now.  */
7479 if (! ignore)
7480 op0 = force_not_mem (op0);
7481 preserve_temp_slots (op0);
7482 expand_end_bindings (NULL_TREE, 0, 0);
7487 /* Check for a built-in function. */
7488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7489 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7491 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7493 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7494 == BUILT_IN_FRONTEND)
7495 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7497 return expand_builtin (exp, target, subtarget, tmode, ignore);
7500 return expand_call (exp, target, ignore);
7502 case NON_LVALUE_EXPR:
7505 case REFERENCE_EXPR:
7506 if (TREE_OPERAND (exp, 0) == error_mark_node)
7509 if (TREE_CODE (type) == UNION_TYPE)
7511 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7513 /* If both input and output are BLKmode, this conversion
7514 isn't actually doing anything unless we need to make the
7515 alignment stricter. */
7516 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7517 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7518 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7519 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7523 target = assign_temp (type, 0, 1, 1);
7525 if (GET_CODE (target) == MEM)
7526 /* Store data into beginning of memory target. */
7527 store_expr (TREE_OPERAND (exp, 0),
7528 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7530 else if (GET_CODE (target) == REG)
7531 /* Store this field into a union of the proper type. */
7532 store_field (target,
7533 MIN ((int_size_in_bytes (TREE_TYPE
7534 (TREE_OPERAND (exp, 0)))
7536 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7537 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7538 VOIDmode, 0, BITS_PER_UNIT,
7539 int_size_in_bytes (type), 0);
7543 /* Return the entire union. */
7547 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7549 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7552 /* If the signedness of the conversion differs and OP0 is
7553 a promoted SUBREG, clear that indication since we now
7554 have to do the proper extension. */
7555 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7556 && GET_CODE (op0) == SUBREG)
7557 SUBREG_PROMOTED_VAR_P (op0) = 0;
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7563 if (GET_MODE (op0) == mode)
7566 /* If OP0 is a constant, just convert it into the proper mode. */
7567 if (CONSTANT_P (op0))
7569 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7570 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7572 if (modifier == EXPAND_INITIALIZER)
7573 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7577 convert_to_mode (mode, op0,
7578 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7580 convert_move (target, op0,
7581 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7585 /* We come here from MINUS_EXPR when the second operand is a
7586 constant.  */
7588 this_optab = ! unsignedp && flag_trapv
7589 && (GET_MODE_CLASS (mode) == MODE_INT)
7590 ? addv_optab : add_optab;
7592 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7593 something else, make sure we add the register to the constant and
7594 then to the other thing. This case can occur during strength
7595 reduction and doing it this way will produce better code if the
7596 frame pointer or argument pointer is eliminated.
7598 fold-const.c will ensure that the constant is always in the inner
7599 PLUS_EXPR, so the only case we need to do anything about is if
7600 sp, ap, or fp is our second argument, in which case we must swap
7601 the innermost first argument and our second argument. */
7603 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7604 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7605 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7606 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7607 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7608 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7610 tree t = TREE_OPERAND (exp, 1);
7612 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7613 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7616 /* If the result is to be ptr_mode and we are adding an integer to
7617 something, we might be forming a constant. So try to use
7618 plus_constant. If it produces a sum and we can't accept it,
7619 use force_operand. This allows P = &ARR[const] to generate
7620 efficient code on machines where a SYMBOL_REF is not a valid
7621 address.
7623 If this is an EXPAND_SUM call, always return the sum.  */
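/* Illustrative example (hypothetical RTL, 32-bit target): for the
   P = &ARR[const] case mentioned above, say "p = &arr[3]" with 4-byte
   elements, plus_constant folds the address to
   (plus:SI (symbol_ref:SI ("arr")) (const_int 12)) instead of
   emitting an explicit add insn.  */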
7624 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7625 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7627 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7628 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7629 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7633 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7635 /* Use immed_double_const to ensure that the constant is
7636 truncated according to the mode of OP1, then sign extended
7637 to a HOST_WIDE_INT. Using the constant directly can result
7638 in non-canonical RTL in a 64x32 cross compile. */
7640 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7642 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7643 op1 = plus_constant (op1, INTVAL (constant_part));
7644 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7645 op1 = force_operand (op1, target);
7649 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7650 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7651 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7657 if (! CONSTANT_P (op0))
7659 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7660 VOIDmode, modifier);
7661 /* Don't go to both_summands if modifier
7662 says it's not right to return a PLUS. */
7663 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7667 /* Use immed_double_const to ensure that the constant is
7668 truncated according to the mode of OP1, then sign extended
7669 to a HOST_WIDE_INT. Using the constant directly can result
7670 in non-canonical RTL in a 64x32 cross compile. */
7672 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7674 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7675 op0 = plus_constant (op0, INTVAL (constant_part));
7676 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7677 op0 = force_operand (op0, target);
7682 /* No sense saving up arithmetic to be done
7683 if it's all in the wrong mode to form part of an address.
7684 And force_operand won't know whether to sign-extend or
7685 zero-extend.  */
7686 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7687 || mode != ptr_mode)
7690 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7694 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7697 /* Make sure any term that's a sum with a constant comes last. */
7698 if (GET_CODE (op0) == PLUS
7699 && CONSTANT_P (XEXP (op0, 1)))
7705 /* If adding to a sum including a constant,
7706 associate it to put the constant outside. */
7707 if (GET_CODE (op1) == PLUS
7708 && CONSTANT_P (XEXP (op1, 1)))
7710 rtx constant_term = const0_rtx;
7712 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7715 /* Ensure that MULT comes first if there is one. */
7716 else if (GET_CODE (op0) == MULT)
7717 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7719 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7721 /* Let's also eliminate constants from op0 if possible. */
7722 op0 = eliminate_constant_term (op0, &constant_term);
7724 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7725 their sum should be a constant. Form it into OP1, since the
7726 result we want will then be OP0 + OP1. */
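/* Worked example (hypothetical): if OP0 is (plus (reg A) (const_int 4))
   and OP1 is (plus (reg B) (const_int 8)), the surrounding steps leave
   OP0 == (plus (reg B) (reg A)) and OP1 == (const_int 12), so both
   constants end up folded into a single outermost term.  */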
7728 temp = simplify_binary_operation (PLUS, mode, constant_term,
7733 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7736 /* Put a constant term last and put a multiplication first. */
7737 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7738 temp = op1, op1 = op0, op0 = temp;
7740 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7741 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7744 /* For initializers, we are allowed to return a MINUS of two
7745 symbolic constants.  Here we handle all cases when both operands
7746 are constant.  */
7747 /* Handle difference of two symbolic constants,
7748 for the sake of an initializer. */
7749 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7750 && really_constant_p (TREE_OPERAND (exp, 0))
7751 && really_constant_p (TREE_OPERAND (exp, 1)))
7753 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7754 VOIDmode, ro_modifier);
7755 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7756 VOIDmode, ro_modifier);
7758 /* If the last operand is a CONST_INT, use plus_constant of
7759 the negated constant. Else make the MINUS. */
7760 if (GET_CODE (op1) == CONST_INT)
7761 return plus_constant (op0, - INTVAL (op1));
7763 return gen_rtx_MINUS (mode, op0, op1);
7765 /* Convert A - const to A + (-const). */
7766 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7768 tree negated = fold (build1 (NEGATE_EXPR, type,
7769 TREE_OPERAND (exp, 1)));
7771 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7772 /* If we can't negate the constant in TYPE, leave it alone and
7773 expand_binop will negate it for us. We used to try to do it
7774 here in the signed version of TYPE, but that doesn't work
7775 on POINTER_TYPEs. */;
7778 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7782 this_optab = ! unsignedp && flag_trapv
7783 && (GET_MODE_CLASS (mode) == MODE_INT)
7784 ? subv_optab : sub_optab;
7788 /* If first operand is constant, swap them.
7789 Thus the following special case checks need only
7790 check the second operand. */
7791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7793 tree t1 = TREE_OPERAND (exp, 0);
7794 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7795 TREE_OPERAND (exp, 1) = t1;
7798 /* Attempt to return something suitable for generating an
7799 indexed address, for machines that support that. */
7801 if (modifier == EXPAND_SUM && mode == ptr_mode
7802 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7803 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7805 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7808 /* Apply distributive law if OP0 is x+c. */
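/* Worked example (hypothetical): (x + 4) * 3 is rewritten below as
   (x * 3) + 12, keeping the constant term outermost where it can
   become part of an address.  */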
7809 if (GET_CODE (op0) == PLUS
7810 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7815 (mode, XEXP (op0, 0),
7816 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7817 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7818 * INTVAL (XEXP (op0, 1))));
7820 if (GET_CODE (op0) != REG)
7821 op0 = force_operand (op0, NULL_RTX);
7822 if (GET_CODE (op0) != REG)
7823 op0 = copy_to_mode_reg (mode, op0);
7826 gen_rtx_MULT (mode, op0,
7827 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7830 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7833 /* Check for multiplying things that have been extended
7834 from a narrower type. If this machine supports multiplying
7835 in that narrower type with a result in the desired type,
7836 do it that way, and avoid the explicit type-conversion. */
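/* Illustrative example (assumes the target provides a widening
   multiply pattern such as mulhisi3): for

       short a, b;
       int c = (int) a * (int) b;

   both operands are NOP_EXPRs around HImode values, so the test below
   lets us multiply in HImode with an SImode result and skip both
   explicit extensions.  */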
7837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7838 && TREE_CODE (type) == INTEGER_TYPE
7839 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7840 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7841 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7842 && int_fits_type_p (TREE_OPERAND (exp, 1),
7843 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7844 /* Don't use a widening multiply if a shift will do. */
7845 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7846 > HOST_BITS_PER_WIDE_INT)
7847 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7849 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7850 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7852 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7853 /* If both operands are extended, they must either both
7854 be zero-extended or both be sign-extended. */
7855 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7857 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7859 enum machine_mode innermode
7860 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7861 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7862 ? smul_widen_optab : umul_widen_optab);
7863 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7864 ? umul_widen_optab : smul_widen_optab);
7865 if (mode == GET_MODE_WIDER_MODE (innermode))
7867 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7869 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7870 NULL_RTX, VOIDmode, 0);
7871 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7872 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7875 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7876 NULL_RTX, VOIDmode, 0);
7879 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7880 && innermode == word_mode)
7883 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7884 NULL_RTX, VOIDmode, 0);
7885 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7886 op1 = convert_modes (innermode, mode,
7887 expand_expr (TREE_OPERAND (exp, 1),
7888 NULL_RTX, VOIDmode, 0),
7891 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7892 NULL_RTX, VOIDmode, 0);
7893 temp = expand_binop (mode, other_optab, op0, op1, target,
7894 unsignedp, OPTAB_LIB_WIDEN);
7895 htem = expand_mult_highpart_adjust (innermode,
7896 gen_highpart (innermode, temp),
7898 gen_highpart (innermode, temp),
7900 emit_move_insn (gen_highpart (innermode, temp), htem);
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7907 return expand_mult (mode, op0, op1, target, unsignedp);
7909 case TRUNC_DIV_EXPR:
7910 case FLOOR_DIV_EXPR:
7912 case ROUND_DIV_EXPR:
7913 case EXACT_DIV_EXPR:
7914 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7916 /* Possible optimization: compute the dividend with EXPAND_SUM;
7917 then, if the divisor is constant, we can optimize the case
7918 where some terms of the dividend have coefficients divisible by it.  */
7919 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7920 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7921 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7924 /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7925 saving an expensive divide.  If not, combine will rebuild the original
7926 computation.  */
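/* Illustrative example (hypothetical source): with
   -funsafe-math-optimizations,

       q1 = x / d; q2 = y / d;

   become x * (1/d) and y * (1/d), and CSE may then share the single
   reciprocal between the two former divisions.  */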
7927 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7928 && !real_onep (TREE_OPERAND (exp, 0)))
7929 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7930 build (RDIV_EXPR, type,
7931 build_real (type, dconst1),
7932 TREE_OPERAND (exp, 1))),
7933 target, tmode, unsignedp);
7934 this_optab = sdiv_optab;
7937 case TRUNC_MOD_EXPR:
7938 case FLOOR_MOD_EXPR:
7940 case ROUND_MOD_EXPR:
7941 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7945 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7947 case FIX_ROUND_EXPR:
7948 case FIX_FLOOR_EXPR:
7950 abort (); /* Not used for C. */
7952 case FIX_TRUNC_EXPR:
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7955 target = gen_reg_rtx (mode);
7956 expand_fix (target, op0, unsignedp);
7960 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7962 target = gen_reg_rtx (mode);
7963 /* expand_float can't figure out what to do if FROM has VOIDmode.
7964 So give it the correct mode. With -O, cse will optimize this. */
7965 if (GET_MODE (op0) == VOIDmode)
7966 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7968 expand_float (target, op0,
7969 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7974 temp = expand_unop (mode,
7975 ! unsignedp && flag_trapv
7976 && (GET_MODE_CLASS (mode) == MODE_INT)
7977 ? negv_optab : neg_optab, op0, target, 0);
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7985 /* Handle complex values specially. */
7986 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7987 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7988 return expand_complex_abs (mode, op0, target, unsignedp);
7990 /* Unsigned abs is simply the operand. Testing here means we don't
7991 risk generating incorrect code below. */
7992 if (TREE_UNSIGNED (type))
7995 return expand_abs (mode, op0, target, unsignedp,
7996 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8000 target = original_target;
8001 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8002 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8003 || GET_MODE (target) != mode
8004 || (GET_CODE (target) == REG
8005 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8006 target = gen_reg_rtx (mode);
8007 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8008 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8010 /* First try to do it with a special MIN or MAX instruction.
8011 If that does not win, use a conditional jump to select the proper
8012 value.  */
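/* Example (hypothetical): MIN_EXPR on unsigned operands tries
   umin_optab (a "uminsi3"-style pattern, where the target defines
   one); failing that, the code below falls back to a
   compare-and-branch sequence equivalent to
   "if (op1 < target) target = op1;".  */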
8013 this_optab = (TREE_UNSIGNED (type)
8014 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8015 : (code == MIN_EXPR ? smin_optab : smax_optab));
8017 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8022 /* At this point, a MEM target is no longer useful; we will get better
8023 code without it.  */
8025 if (GET_CODE (target) == MEM)
8026 target = gen_reg_rtx (mode);
8029 emit_move_insn (target, op0);
8031 op0 = gen_label_rtx ();
8033 /* If this mode is an integer too wide to compare properly,
8034 compare word by word. Rely on cse to optimize constant cases. */
8035 if (GET_MODE_CLASS (mode) == MODE_INT
8036 && ! can_compare_p (GE, mode, ccp_jump))
8038 if (code == MAX_EXPR)
8039 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8040 target, op1, NULL_RTX, op0);
8042 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8043 op1, target, NULL_RTX, op0);
8047 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8048 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8049 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8052 emit_move_insn (target, op1);
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8058 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8065 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8070 /* ??? Can optimize bitwise operations with one arg constant.
8071 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8072 and (a bitwise1 b) bitwise2 b (etc)
8073 but that is probably not worthwhile.  */
8075 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8076 boolean values when we want in all cases to compute both of them. In
8077 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8078 as actual zero-or-1 values and then bitwise anding. In cases where
8079 there cannot be any side effects, better code would be made by
8080 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8081 how to recognize those cases. */
8083 case TRUTH_AND_EXPR:
8085 this_optab = and_optab;
8090 this_optab = ior_optab;
8093 case TRUTH_XOR_EXPR:
8095 this_optab = xor_optab;
8102 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8105 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8108 /* Could determine the answer when only additive constants differ. Also,
8109 the addition of one can be handled by changing the condition. */
8116 case UNORDERED_EXPR:
8123 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8127 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
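/* Sketch of the emitted sequence (pseudo-code):

       temp = foo;
       if (temp == 0) goto L;
       temp = 1;
     L:

   so TEMP ends up holding exactly 0 or 1.  */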
8128 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8130 && GET_CODE (original_target) == REG
8131 && (GET_MODE (original_target)
8132 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8134 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8137 if (temp != original_target)
8138 temp = copy_to_reg (temp);
8140 op1 = gen_label_rtx ();
8141 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8142 GET_MODE (temp), unsignedp, 0, op1);
8143 emit_move_insn (temp, const1_rtx);
8148 /* If no set-flag instruction, must generate a conditional
8149 store into a temporary variable. Drop through
8150 and handle this like && and ||. */
8152 case TRUTH_ANDIF_EXPR:
8153 case TRUTH_ORIF_EXPR:
8155 && (target == 0 || ! safe_from_p (target, exp, 1)
8156 /* Make sure we don't have a hard reg (such as function's return
8157 value) live across basic blocks, if not optimizing. */
8158 || (!optimize && GET_CODE (target) == REG
8159 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8160 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8163 emit_clr_insn (target);
8165 op1 = gen_label_rtx ();
8166 jumpifnot (exp, op1);
8169 emit_0_to_1_insn (target);
8172 return ignore ? const0_rtx : target;
8174 case TRUTH_NOT_EXPR:
8175 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8176 /* The parser is careful to generate TRUTH_NOT_EXPR
8177 only with operands that are always zero or one. */
8178 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8179 target, 1, OPTAB_LIB_WIDEN);
8185 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8187 return expand_expr (TREE_OPERAND (exp, 1),
8188 (ignore ? const0_rtx : target),
8192 /* If we would have a "singleton" (see below) were it not for a
8193 conversion in each arm, bring that conversion back out. */
8194 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8195 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8196 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8197 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8199 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8200 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8202 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8203 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8204 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8205 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8206 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8207 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8208 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8209 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8210 return expand_expr (build1 (NOP_EXPR, type,
8211 build (COND_EXPR, TREE_TYPE (iftrue),
8212 TREE_OPERAND (exp, 0),
8214 target, tmode, modifier);
8218 /* Note that COND_EXPRs whose type is a structure or union
8219 are required to be constructed to contain assignments of
8220 a temporary variable, so that we can evaluate them here
8221 for side effect only. If type is void, we must do likewise. */
8223 /* If an arm of the branch requires a cleanup,
8224 only that cleanup is performed. */
8227 tree binary_op = 0, unary_op = 0;
8229 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8230 convert it to our mode, if necessary. */
8231 if (integer_onep (TREE_OPERAND (exp, 1))
8232 && integer_zerop (TREE_OPERAND (exp, 2))
8233 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8243 if (GET_MODE (op0) == mode)
8247 target = gen_reg_rtx (mode);
8248 convert_move (target, op0, unsignedp);
8252 /* Check for X ? A + B : A. If we have this, we can copy A to the
8253 output and conditionally add B. Similarly for unary operations.
8254 Don't do this if X has side-effects because those side effects
8255 might affect A or B and the "?" operation is a sequence point in
8256 ANSI. (operand_equal_p tests for side effects.) */
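/* For example (illustrative): in `x ? a + b : a' the arm `a' is the
   "singleton"; we can store a into the target once and conditionally
   add b, roughly

       temp = a;
       if (!x) goto lab;
       temp = temp + b;
     lab:
 */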
8258 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8259 && operand_equal_p (TREE_OPERAND (exp, 2),
8260 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8261 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8262 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8263 && operand_equal_p (TREE_OPERAND (exp, 1),
8264 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8265 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8266 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8267 && operand_equal_p (TREE_OPERAND (exp, 2),
8268 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8269 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8270 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8271 && operand_equal_p (TREE_OPERAND (exp, 1),
8272 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8273 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
/* If we are not to produce a result, we have no target.  Otherwise,
   if a target was specified use it; it will not be used as an
   intermediate target unless it is safe.  If no target, use a
   temporary.  */
if (ignore)
  temp = 0;
8282 else if (original_target
8283 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8284 || (singleton && GET_CODE (original_target) == REG
8285 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8286 && original_target == var_rtx (singleton)))
8287 && GET_MODE (original_target) == mode
8288 #ifdef HAVE_conditional_move
8289 && (! can_conditionally_move_p (mode)
8290 || GET_CODE (original_target) == REG
8291 || TREE_ADDRESSABLE (type))
8293 && (GET_CODE (original_target) != MEM
8294 || TREE_ADDRESSABLE (type)))
8295 temp = original_target;
8296 else if (TREE_ADDRESSABLE (type))
8299 temp = assign_temp (type, 0, 0, 1);
/* If we had X ? A + C : A, with C a constant power of 2, and we can
   do the test of X as a store-flag operation, do this as
   A + ((X != 0) << log C).  Similarly for other simple binary
   operators.  Only do this for C == 1 if BRANCH_COST is low.  */
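/* A worked example (illustrative): for `x ? a + 8 : a' with
   BRANCH_COST >= 3, 8 is a power of 2, so we emit the branch-free
   sequence

       t = (x != 0);        - do_store_flag
       t = t << 3;          - tree_log2 (8) == 3
       result = a + t;
 */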
8305 if (temp && singleton && binary_op
8306 && (TREE_CODE (binary_op) == PLUS_EXPR
8307 || TREE_CODE (binary_op) == MINUS_EXPR
8308 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8309 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8310 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8311 : integer_onep (TREE_OPERAND (binary_op, 1)))
8312 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8315 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8316 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8317 ? addv_optab : add_optab)
8318 : TREE_CODE (binary_op) == MINUS_EXPR
8319 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8320 ? subv_optab : sub_optab)
8321 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
/* If we had X ? A : A + 1, do this as A + (X == 0).

   We have to invert the truth value here and then put it
   back later if do_store_flag fails.  We cannot simply copy
   TREE_OPERAND (exp, 0) to another variable and modify that
   because invert_truthvalue can modify the tree pointed to
   by its argument.  */
8331 if (singleton == TREE_OPERAND (exp, 1))
8332 TREE_OPERAND (exp, 0)
8333 = invert_truthvalue (TREE_OPERAND (exp, 0));
8335 result = do_store_flag (TREE_OPERAND (exp, 0),
8336 (safe_from_p (temp, singleton, 1)
8338 mode, BRANCH_COST <= 1);
8340 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8341 result = expand_shift (LSHIFT_EXPR, mode, result,
8342 build_int_2 (tree_log2
8346 (safe_from_p (temp, singleton, 1)
8347 ? temp : NULL_RTX), 0);
8351 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8352 return expand_binop (mode, boptab, op1, result, temp,
8353 unsignedp, OPTAB_LIB_WIDEN);
8355 else if (singleton == TREE_OPERAND (exp, 1))
8356 TREE_OPERAND (exp, 0)
8357 = invert_truthvalue (TREE_OPERAND (exp, 0));
8360 do_pending_stack_adjust ();
8362 op0 = gen_label_rtx ();
8364 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8368 /* If the target conflicts with the other operand of the
8369 binary op, we can't use it. Also, we can't use the target
8370 if it is a hard register, because evaluating the condition
8371 might clobber it. */
if ((binary_op
     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8374 || (GET_CODE (temp) == REG
8375 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8376 temp = gen_reg_rtx (mode);
8377 store_expr (singleton, temp, 0);
8380 expand_expr (singleton,
8381 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8382 if (singleton == TREE_OPERAND (exp, 1))
8383 jumpif (TREE_OPERAND (exp, 0), op0);
8385 jumpifnot (TREE_OPERAND (exp, 0), op0);
8387 start_cleanup_deferral ();
8388 if (binary_op && temp == 0)
8389 /* Just touch the other operand. */
8390 expand_expr (TREE_OPERAND (binary_op, 1),
8391 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8393 store_expr (build (TREE_CODE (binary_op), type,
8394 make_tree (type, temp),
8395 TREE_OPERAND (binary_op, 1)),
8398 store_expr (build1 (TREE_CODE (unary_op), type,
8399 make_tree (type, temp)),
8403 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8404 comparison operator. If we have one of these cases, set the
8405 output to A, branch on A (cse will merge these two references),
8406 then set the output to FOO. */
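/* For example (illustrative): `a > 0 ? a : foo' becomes

       temp = a;
       if (a > 0) goto lab;   - cse merges the two uses of a
       temp = foo;
     lab:
 */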
else if (temp
         && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8409 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8410 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8411 TREE_OPERAND (exp, 1), 0)
8412 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8413 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8414 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8416 if (GET_CODE (temp) == REG
8417 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8418 temp = gen_reg_rtx (mode);
8419 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8420 jumpif (TREE_OPERAND (exp, 0), op0);
8422 start_cleanup_deferral ();
8423 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else if (temp
         && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8428 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8429 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8430 TREE_OPERAND (exp, 2), 0)
8431 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8432 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8433 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8435 if (GET_CODE (temp) == REG
8436 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8437 temp = gen_reg_rtx (mode);
8438 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8439 jumpifnot (TREE_OPERAND (exp, 0), op0);
8441 start_cleanup_deferral ();
8442 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8447 op1 = gen_label_rtx ();
8448 jumpifnot (TREE_OPERAND (exp, 0), op0);
8450 start_cleanup_deferral ();
/* One branch of the cond can be void, if it never returns.  For
   example, A ? throw : E.  */
if (temp != 0
8455 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8456 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8458 expand_expr (TREE_OPERAND (exp, 1),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8460 end_cleanup_deferral ();
8462 emit_jump_insn (gen_jump (op1));
start_cleanup_deferral ();
if (temp != 0
    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8468 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8470 expand_expr (TREE_OPERAND (exp, 2),
8471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8474 end_cleanup_deferral ();
/* Something needs to be initialized, but we didn't know
   where that thing was when building the tree.  For example,
   it could be the return value of a function, or a parameter
   to a function which is laid out on the stack, or a temporary
   variable which must be passed by reference.
8491 We guarantee that the expression will either be constructed
8492 or copied into our original target. */
8494 tree slot = TREE_OPERAND (exp, 0);
8495 tree cleanups = NULL_TREE;
8498 if (TREE_CODE (slot) != VAR_DECL)
8502 target = original_target;
8504 /* Set this here so that if we get a target that refers to a
8505 register variable that's already been used, put_reg_into_stack
8506 knows that it should fix up those uses. */
8507 TREE_USED (slot) = 1;
8511 if (DECL_RTL_SET_P (slot))
8513 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do it again.  */
8516 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8521 target = assign_temp (type, 2, 0, 1);
8522 /* All temp slots at this level must not conflict. */
8523 preserve_temp_slots (target);
8524 SET_DECL_RTL (slot, target);
8525 if (TREE_ADDRESSABLE (slot))
8526 put_var_into_stack (slot);
/* Since SLOT is not known to the called function
   to belong to its stack frame, we must build an explicit
   cleanup.  This case occurs when we must build up a reference
   to pass the reference as an argument.  In this case,
   it is very likely that such a reference need not be
   built here.  */
8535 if (TREE_OPERAND (exp, 2) == 0)
8536 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8537 cleanups = TREE_OPERAND (exp, 2);
/* This case does occur when expanding a parameter which
   needs to be constructed on the stack.  The target
   is the actual stack address that we want to initialize.
   The function we call will perform the cleanup in this case.  */
/* If we have already assigned it space, use that space,
   not the target we were passed in, as our target
   parameter is only a hint.  */
8550 if (DECL_RTL_SET_P (slot))
8552 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do it again.  */
8555 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8560 SET_DECL_RTL (slot, target);
8561 /* If we must have an addressable slot, then make sure that
8562 the RTL that we just stored in slot is OK. */
8563 if (TREE_ADDRESSABLE (slot))
8564 put_var_into_stack (slot);
8568 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8569 /* Mark it as expanded. */
8570 TREE_OPERAND (exp, 1) = NULL_TREE;
8572 store_expr (exp1, target, 0);
8574 expand_decl_cleanup (NULL_TREE, cleanups);
8581 tree lhs = TREE_OPERAND (exp, 0);
8582 tree rhs = TREE_OPERAND (exp, 1);
8584 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
/* If lhs is complex, expand calls in rhs before computing it.
   That's so we don't compute a pointer and save it over a
   call.  If lhs is simple, compute it first so we can give it
   as a target if the rhs is just a call.  This avoids an
   extra temp and copy, and prevents a partial subsumption
   which makes bad code.  Actually we could treat
   component_ref's of vars like vars.  */
8598 tree lhs = TREE_OPERAND (exp, 0);
8599 tree rhs = TREE_OPERAND (exp, 1);
/* Check for |= or &= of a bitfield of size one into another bitfield
   of size one.  In this case, (unless we need the result of the
   assignment) we can do this more efficiently with a
   test followed by an assignment, if necessary.

   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
   things change so we do, this code should be enhanced to
   handle it.  */
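/* For example (illustrative, with invented field names): given 1-bit
   fields s.b and t.c, `s.b |= t.c' becomes

       if (t.c == 0) goto lab;
       s.b = 1;
     lab:

   and `s.b &= t.c' becomes the analogous test followed by a store of 0.  */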
8612 && TREE_CODE (lhs) == COMPONENT_REF
8613 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8614 || TREE_CODE (rhs) == BIT_AND_EXPR)
8615 && TREE_OPERAND (rhs, 0) == lhs
8616 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8617 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8618 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8620 rtx label = gen_label_rtx ();
8622 do_jump (TREE_OPERAND (rhs, 1),
8623 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8624 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8625 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8626 (TREE_CODE (rhs) == BIT_IOR_EXPR
8628 : integer_zero_node)),
8630 do_pending_stack_adjust ();
8635 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8641 if (!TREE_OPERAND (exp, 0))
8642 expand_null_return ();
8644 expand_return (TREE_OPERAND (exp, 0));
8647 case PREINCREMENT_EXPR:
8648 case PREDECREMENT_EXPR:
8649 return expand_increment (exp, 0, ignore);
8651 case POSTINCREMENT_EXPR:
8652 case POSTDECREMENT_EXPR:
/* Faster to treat as a pre-increment if the result is not used.  */
8654 return expand_increment (exp, ! ignore, ignore);
8657 /* If nonzero, TEMP will be set to the address of something that might
8658 be a MEM corresponding to a stack slot. */
8661 /* Are we taking the address of a nested function? */
8662 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8663 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8664 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8665 && ! TREE_STATIC (exp))
8667 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8668 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
   use zero.  */
8672 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8676 /* We make sure to pass const0_rtx down if we came in with
8677 ignore set, to avoid doing the cleanups twice for something. */
8678 op0 = expand_expr (TREE_OPERAND (exp, 0),
8679 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8680 (modifier == EXPAND_INITIALIZER
8681 ? modifier : EXPAND_CONST_ADDRESS));
8683 /* If we are going to ignore the result, OP0 will have been set
8684 to const0_rtx, so just return it. Don't get confused and
8685 think we are taking the address of the constant. */
/* Pass 1 for MODIFY, so that protect_from_queue doesn't get
   clever and return a REG when given a MEM.  */
8691 op0 = protect_from_queue (op0, 1);
8693 /* We would like the object in memory. If it is a constant, we can
8694 have it be statically allocated into memory. For a non-constant,
8695 we need to allocate some memory and store the value into it. */
8697 if (CONSTANT_P (op0))
8698 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8700 else if (GET_CODE (op0) == MEM)
8702 mark_temp_addr_taken (op0);
8703 temp = XEXP (op0, 0);
8706 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8707 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8708 || GET_CODE (op0) == PARALLEL)
/* If this object lives in a register, we must copy it into
   memory before its address can be taken.  */
8712 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8713 tree nt = build_qualified_type (inner_type,
8714 (TYPE_QUALS (inner_type)
8715 | TYPE_QUAL_CONST));
8716 rtx memloc = assign_temp (nt, 1, 1, 1);
8718 mark_temp_addr_taken (memloc);
8719 if (GET_CODE (op0) == PARALLEL)
8720 /* Handle calls that pass values in multiple non-contiguous
8721 locations. The Irix 6 ABI has examples of this. */
8722 emit_group_store (memloc, op0,
8723 int_size_in_bytes (inner_type),
8724 TYPE_ALIGN (inner_type));
8726 emit_move_insn (memloc, op0);
8730 if (GET_CODE (op0) != MEM)
8733 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8735 temp = XEXP (op0, 0);
8736 #ifdef POINTERS_EXTEND_UNSIGNED
8737 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8738 && mode == ptr_mode)
8739 temp = convert_memory_address (ptr_mode, temp);
8744 op0 = force_operand (XEXP (op0, 0), target);
8747 if (flag_force_addr && GET_CODE (op0) != REG)
8748 op0 = force_reg (Pmode, op0);
8750 if (GET_CODE (op0) == REG
8751 && ! REG_USERVAR_P (op0))
8752 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
/* If we might have had a temp slot, add an equivalent address
   for it.  */
if (temp != 0)
8757 update_temp_slot_address (temp, op0);
8759 #ifdef POINTERS_EXTEND_UNSIGNED
8760 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8761 && mode == ptr_mode)
8762 op0 = convert_memory_address (ptr_mode, op0);
8767 case ENTRY_VALUE_EXPR:
8770 /* COMPLEX type for Extended Pascal & Fortran */
8773 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8776 /* Get the rtx code of the operands. */
8777 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8778 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8781 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8785 /* Move the real (op0) and imaginary (op1) parts to their location. */
8786 emit_move_insn (gen_realpart (mode, target), op0);
8787 emit_move_insn (gen_imagpart (mode, target), op1);
8789 insns = get_insns ();
8792 /* Complex construction should appear as a single unit. */
8793 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8794 each with a separate pseudo as destination.
8795 It's not correct for flow to treat them as a unit. */
8796 if (GET_CODE (target) != CONCAT)
8797 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8805 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8806 return gen_realpart (mode, op0);
8809 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8810 return gen_imagpart (mode, op0);
8814 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8818 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8821 target = gen_reg_rtx (mode);
8825 /* Store the realpart and the negated imagpart to target. */
8826 emit_move_insn (gen_realpart (partmode, target),
8827 gen_realpart (partmode, op0));
8829 imag_t = gen_imagpart (partmode, target);
8830 temp = expand_unop (partmode,
8831 ! unsignedp && flag_trapv
8832 && (GET_MODE_CLASS(partmode) == MODE_INT)
8833 ? negv_optab : neg_optab,
8834 gen_imagpart (partmode, op0), imag_t, 0);
8836 emit_move_insn (imag_t, temp);
8838 insns = get_insns ();
/* Conjugate should appear as a single unit.
   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
   each with a separate pseudo as destination.
   It's not correct for flow to treat them as a unit.  */
8845 if (GET_CODE (target) != CONCAT)
8846 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8853 case TRY_CATCH_EXPR:
8855 tree handler = TREE_OPERAND (exp, 1);
8857 expand_eh_region_start ();
8859 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8861 expand_eh_region_end_cleanup (handler);
8866 case TRY_FINALLY_EXPR:
8868 tree try_block = TREE_OPERAND (exp, 0);
8869 tree finally_block = TREE_OPERAND (exp, 1);
8870 rtx finally_label = gen_label_rtx ();
8871 rtx done_label = gen_label_rtx ();
8872 rtx return_link = gen_reg_rtx (Pmode);
8873 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8874 (tree) finally_label, (tree) return_link);
8875 TREE_SIDE_EFFECTS (cleanup) = 1;
8877 /* Start a new binding layer that will keep track of all cleanup
8878 actions to be performed. */
8879 expand_start_bindings (2);
8881 target_temp_slot_level = temp_slot_level;
8883 expand_decl_cleanup (NULL_TREE, cleanup);
8884 op0 = expand_expr (try_block, target, tmode, modifier);
8886 preserve_temp_slots (op0);
8887 expand_end_bindings (NULL_TREE, 0, 0);
8888 emit_jump (done_label);
8889 emit_label (finally_label);
8890 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8891 emit_indirect_jump (return_link);
8892 emit_label (done_label);
8896 case GOTO_SUBROUTINE_EXPR:
8898 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8899 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8900 rtx return_address = gen_label_rtx ();
8901 emit_move_insn (return_link,
8902 gen_rtx_LABEL_REF (Pmode, return_address));
8904 emit_label (return_address);
8909 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8912 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as
   initialization constants, and should not be expanded.  */
8920 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8923 /* Here to do an ordinary binary operator, generating an instruction
8924 from the optab already placed in `this_optab'. */
8926 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8929 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8931 temp = expand_binop (mode, this_optab, op0, op1, target,
8932 unsignedp, OPTAB_LIB_WIDEN);
8938 /* Similar to expand_expr, except that we don't specify a target, target
8939 mode, or modifier and we return the alignment of the inner type. This is
8940 used in cases where it is not necessary to align the result to the
8941 alignment of its type as long as we know the alignment of the result, for
8942 example for comparisons of BLKmode values. */
8945 expand_expr_unaligned (exp, palign)
8947 unsigned int *palign;
8950 tree type = TREE_TYPE (exp);
8951 enum machine_mode mode = TYPE_MODE (type);
8953 /* Default the alignment we return to that of the type. */
8954 *palign = TYPE_ALIGN (type);
/* The only case in which we do anything special is if the resulting
   mode is BLKmode.  */
8958 if (mode != BLKmode)
8959 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8961 switch (TREE_CODE (exp))
8965 case NON_LVALUE_EXPR:
8966 /* Conversions between BLKmode values don't change the underlying
8967 alignment or value. */
8968 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8969 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
/* Much of the code for this case is copied directly from expand_expr.
   We need to duplicate it here because we will do something different
   in the fall-through case, so we need to handle the same exceptions
   it does.  */
8979 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8980 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8981 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8984 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
/* Optimize the special case of a zero lower bound.

   We convert the low_bound to sizetype to avoid some problems
   with constant folding.  (E.g. suppose the lower bound is 1,
   and its mode is QI.  Without the conversion, (ARRAY
   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
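/* For example (illustrative): for an array declared with bounds 1 .. 10,
   as an Ada or Pascal front end may produce, the reference becomes
   ARRAY + (INDEX - (sizetype) 1) after the adjustment below.  */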
8995 if (! integer_zerop (low_bound))
8996 index = size_diffop (index, convert (sizetype, low_bound));
/* If this is a constant index into a constant array,
   just get the value from the array.  Handle both the case where
   we have an explicit constructor and the case where our operand is a
   variable that was declared const.  */
9003 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
9004 && host_integerp (index, 0)
9005 && 0 > compare_tree_int (index,
9006 list_length (CONSTRUCTOR_ELTS
9007 (TREE_OPERAND (exp, 0)))))
9011 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
9012 i = tree_low_cst (index, 0);
9013 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
9017 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
9020 else if (optimize >= 1
9021 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9022 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9023 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
9025 if (TREE_CODE (index) == INTEGER_CST)
9027 tree init = DECL_INITIAL (array);
9029 if (TREE_CODE (init) == CONSTRUCTOR)
9033 for (elem = CONSTRUCTOR_ELTS (init);
9034 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9035 elem = TREE_CHAIN (elem))
9039 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9049 case ARRAY_RANGE_REF:
9050 /* If the operand is a CONSTRUCTOR, we can just extract the
9051 appropriate field if it is present. Don't do this if we have
9052 already written the data since we want to refer to that copy
9053 and varasm.c assumes that's what we'll do. */
9054 if (TREE_CODE (exp) == COMPONENT_REF
9055 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9056 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9060 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9061 elt = TREE_CHAIN (elt))
9062 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9063 /* Note that unlike the case in expand_expr, we know this is
9064 BLKmode and hence not an integer. */
9065 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9069 enum machine_mode mode1;
9070 HOST_WIDE_INT bitsize, bitpos;
9073 unsigned int alignment;
9075 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9076 &mode1, &unsignedp, &volatilep,
9079 /* If we got back the original object, something is wrong. Perhaps
9080 we are evaluating an expression too early. In any event, don't
9081 infinitely recurse. */
9085 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
9089 if (CONSTANT_P (op0))
9091 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9093 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9095 op0 = force_reg (inner_mode, op0);
9097 op0 = validize_mem (force_const_mem (inner_mode, op0));
9102 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9104 /* If this object is in a register, put it into memory.
9105 This case can't occur in C, but can in Ada if we have
9106 unchecked conversion of an expression from a scalar type to
9107 an array or record type. */
9108 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9109 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9111 tree nt = build_qualified_type (TREE_TYPE (tem),
9112 (TYPE_QUALS (TREE_TYPE (tem))
9113 | TYPE_QUAL_CONST));
9114 rtx memloc = assign_temp (nt, 1, 1, 1);
9116 mark_temp_addr_taken (memloc);
9117 emit_move_insn (memloc, op0);
9121 if (GET_CODE (op0) != MEM)
9124 if (GET_MODE (offset_rtx) != ptr_mode)
9125 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9127 #ifdef POINTERS_EXTEND_UNSIGNED
9128 if (GET_MODE (offset_rtx) != Pmode)
9129 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9132 op0 = offset_address (op0, offset_rtx,
9133 highest_pow2_factor (offset));
9136 /* Don't forget about volatility even if this is a bitfield. */
9137 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9139 op0 = copy_rtx (op0);
9140 MEM_VOLATILE_P (op0) = 1;
9143 /* Check the access. */
9144 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9149 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9150 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
/* Check the access rights of the pointer.  */
9153 in_check_memory_usage = 1;
9154 if (size > BITS_PER_UNIT)
9155 emit_library_call (chkr_check_addr_libfunc,
9156 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9157 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9158 TYPE_MODE (sizetype),
9159 GEN_INT (MEMORY_USE_RO),
9160 TYPE_MODE (integer_type_node));
9161 in_check_memory_usage = 0;
9164 /* In cases where an aligned union has an unaligned object
9165 as a field, we might be extracting a BLKmode value from
9166 an integer-mode (e.g., SImode) object. Handle this case
9167 by doing the extract into an object as wide as the field
9168 (which we know to be the width of a basic mode), then
9169 storing into memory, and changing the mode to BLKmode.
9170 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9171 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9172 if (mode1 == VOIDmode
9173 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9174 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9175 && (TYPE_ALIGN (type) > alignment
9176 || bitpos % TYPE_ALIGN (type) != 0)))
9178 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9180 if (ext_mode == BLKmode)
9182 /* In this case, BITPOS must start at a byte boundary. */
9183 if (GET_CODE (op0) != MEM
9184 || bitpos % BITS_PER_UNIT != 0)
9187 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9191 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9193 rtx new = assign_temp (nt, 0, 1, 1);
9195 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9196 unsignedp, NULL_RTX, ext_mode,
9197 ext_mode, alignment,
9198 int_size_in_bytes (TREE_TYPE (tem)));
9200 /* If the result is a record type and BITSIZE is narrower than
9201 the mode of OP0, an integral mode, and this is a big endian
9202 machine, we must put the field into the high-order bits. */
9203 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9204 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9205 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9206 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9207 size_int (GET_MODE_BITSIZE
9212 emit_move_insn (new, op0);
9213 op0 = copy_rtx (new);
9214 PUT_MODE (op0, BLKmode);
9218 /* Get a reference to just this component. */
9219 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9221 set_mem_attributes (op0, exp, 0);
9223 /* Adjust the alignment in case the bit position is not
9224 a multiple of the alignment of the inner object. */
9225 while (bitpos % alignment != 0)
9228 if (GET_CODE (XEXP (op0, 0)) == REG)
9229 mark_reg_pointer (XEXP (op0, 0), alignment);
9231 MEM_IN_STRUCT_P (op0) = 1;
9232 MEM_VOLATILE_P (op0) |= volatilep;
9234 *palign = alignment;
9243 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
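/* For example (illustrative): given ARG for `&"hello"[0] + 2' -- a
   PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the constant 2 -- we
   return the STRING_CST "hello" and set *PTR_OFFSET to (sizetype) 2.  */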
9252 string_constant (arg, ptr_offset)
9258 if (TREE_CODE (arg) == ADDR_EXPR
9259 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9261 *ptr_offset = size_zero_node;
9262 return TREE_OPERAND (arg, 0);
9264 else if (TREE_CODE (arg) == PLUS_EXPR)
9266 tree arg0 = TREE_OPERAND (arg, 0);
9267 tree arg1 = TREE_OPERAND (arg, 1);
9272 if (TREE_CODE (arg0) == ADDR_EXPR
9273 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9275 *ptr_offset = convert (sizetype, arg1);
9276 return TREE_OPERAND (arg0, 0);
9278 else if (TREE_CODE (arg1) == ADDR_EXPR
9279 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9281 *ptr_offset = convert (sizetype, arg0);
9282 return TREE_OPERAND (arg1, 0);
/* Expand code for a post- or pre-increment or decrement
   and return the RTX for the result.
   POST is 1 for postincrement/decrement and 0 for preincrement/decrement.  */
9294 expand_increment (exp, post, ignore)
9300 tree incremented = TREE_OPERAND (exp, 0);
9301 optab this_optab = add_optab;
9303 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9304 int op0_is_copy = 0;
9305 int single_insn = 0;
9306 /* 1 means we can't store into OP0 directly,
9307 because it is a subreg narrower than a word,
9308 and we don't dare clobber the rest of the word. */
9311 /* Stabilize any component ref that might need to be
9312 evaluated more than once below. */
9314 || TREE_CODE (incremented) == BIT_FIELD_REF
9315 || (TREE_CODE (incremented) == COMPONENT_REF
9316 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9317 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9318 incremented = stabilize_reference (incremented);
9319 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9320 ones into save exprs so that they don't accidentally get evaluated
9321 more than once by the code below. */
9322 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9323 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9324 incremented = save_expr (incremented);
9326 /* Compute the operands as RTX.
9327 Note whether OP0 is the actual lvalue or a copy of it:
9328 I believe it is a copy iff it is a register or subreg
9329 and insns were generated in computing it. */
9331 temp = get_last_insn ();
9332 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
/* If OP0 is a SUBREG made for a promoted variable, we cannot increment
   in place but instead must do sign- or zero-extension during assignment,
   so we copy it into a new register and let the code below use it as
   a copy.

   Note that we can safely modify this SUBREG since it is known not to be
   shared (it was made by the expand_expr call above).  */
9342 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9345 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9349 else if (GET_CODE (op0) == SUBREG
9350 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9352 /* We cannot increment this SUBREG in place. If we are
9353 post-incrementing, get a copy of the old value. Otherwise,
9354 just mark that we cannot increment in place. */
9356 op0 = copy_to_reg (op0);
9361 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9362 && temp != get_last_insn ());
9363 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9364 EXPAND_MEMORY_USE_BAD);
9366 /* Decide whether incrementing or decrementing. */
9367 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9368 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9369 this_optab = sub_optab;
9371 /* Convert decrement by a constant into a negative increment. */
9372 if (this_optab == sub_optab
9373 && GET_CODE (op1) == CONST_INT)
9375 op1 = GEN_INT (-INTVAL (op1));
9376 this_optab = add_optab;
9379 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9380 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9382 /* For a preincrement, see if we can do this with a single instruction. */
9385 icode = (int) this_optab->handlers[(int) mode].insn_code;
9386 if (icode != (int) CODE_FOR_nothing
9387 /* Make sure that OP0 is valid for operands 0 and 1
9388 of the insn we want to queue. */
9389 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9390 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9391 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9395 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9396 then we cannot just increment OP0. We must therefore contrive to
9397 increment the original value. Then, for postincrement, we can return
9398 OP0 since it is a copy of the old value. For preincrement, expand here
9399 unless we can do it with a single insn.
9401 Likewise if storing directly into OP0 would clobber high bits
9402 we need to preserve (bad_subreg). */
9403 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9405 /* This is the easiest way to increment the value wherever it is.
9406 Problems with multiple evaluation of INCREMENTED are prevented
9407 because either (1) it is a component_ref or preincrement,
9408 in which case it was stabilized above, or (2) it is an array_ref
9409 with constant index in an array in a register, which is
9410 safe to reevaluate. */
9411 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9412 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9413 ? MINUS_EXPR : PLUS_EXPR),
9416 TREE_OPERAND (exp, 1));
9418 while (TREE_CODE (incremented) == NOP_EXPR
9419 || TREE_CODE (incremented) == CONVERT_EXPR)
9421 newexp = convert (TREE_TYPE (incremented), newexp);
9422 incremented = TREE_OPERAND (incremented, 0);
9425 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9426 return post ? op0 : temp;
9431 /* We have a true reference to the value in OP0.
9432 If there is an insn to add or subtract in this mode, queue it.
9433 Queueing the increment insn avoids the register shuffling
9434 that often results if we must increment now and first save
9435 the old value for subsequent use. */
9437 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9438 op0 = stabilize (op0);
9441 icode = (int) this_optab->handlers[(int) mode].insn_code;
9442 if (icode != (int) CODE_FOR_nothing
9443 /* Make sure that OP0 is valid for operands 0 and 1
9444 of the insn we want to queue. */
9445 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9446 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9448 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9449 op1 = force_reg (mode, op1);
9451 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9453 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9455 rtx addr = (general_operand (XEXP (op0, 0), mode)
9456 ? force_reg (Pmode, XEXP (op0, 0))
9457 : copy_to_reg (XEXP (op0, 0)));
9460 op0 = replace_equiv_address (op0, addr);
9461 temp = force_reg (GET_MODE (op0), op0);
9462 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9463 op1 = force_reg (mode, op1);
9465 /* The increment queue is LIFO, thus we have to `queue'
9466 the instructions in reverse order. */
9467 enqueue_insn (op0, gen_move_insn (op0, temp));
9468 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9473 /* Preincrement, or we can't increment with one simple insn. */
9475 /* Save a copy of the value before inc or dec, to return it later. */
9476 temp = value = copy_to_reg (op0);
9478 /* Arrange to return the incremented value. */
9479 /* Copy the rtx because expand_binop will protect from the queue,
9480 and the results of that would be invalid for us to return
9481 if our caller does emit_queue before using our result. */
9482 temp = copy_rtx (value = op0);
9484 /* Increment however we can. */
9485 op1 = expand_binop (mode, this_optab, value, op1,
9486 current_function_check_memory_usage ? NULL_RTX : op0,
9487 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9488 /* Make sure the value is stored into OP0. */
9490 emit_move_insn (op0, op1);
9495 /* At the start of a function, record that we have no previously-pushed
9496 arguments waiting to be popped. */
9499 init_pending_stack_adjust ()
9501 pending_stack_adjust = 0;
/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9511 clear_pending_stack_adjust ()
9513 #ifdef EXIT_IGNORE_STACK
9515 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9516 && EXIT_IGNORE_STACK
9517 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9518 && ! flag_inline_functions)
9520 stack_pointer_delta -= pending_stack_adjust,
9521 pending_stack_adjust = 0;
9526 /* Pop any previously-pushed arguments that have not been popped yet. */
9529 do_pending_stack_adjust ()
9531 if (inhibit_defer_pop == 0)
9533 if (pending_stack_adjust != 0)
9534 adjust_stack (GEN_INT (pending_stack_adjust));
9535 pending_stack_adjust = 0;
9539 /* Expand conditional expressions. */
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.

   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */
9546 jumpifnot (exp, label)
9550 do_jump (exp, label, NULL_RTX);
9553 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9560 do_jump (exp, NULL_RTX, label);
9563 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9564 the result is zero, or IF_TRUE_LABEL if the result is one.
9565 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9566 meaning fall through in that case.
9568 do_jump always does any pending stack adjust except when it does not
9569 actually perform a jump. An example where there is no jump
9570 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9572 This function is responsible for optimizing cases such as
9573 &&, || and comparison operators in EXP. */
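/* For example (illustrative): for EXP `a && b' with only IF_FALSE_LABEL
   set, we emit

       if (a == 0) goto if_false;
       if (b == 0) goto if_false;

   so no 0-or-1 value for the TRUTH_ANDIF_EXPR is ever materialized.  */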
9576 do_jump (exp, if_false_label, if_true_label)
9578 rtx if_false_label, if_true_label;
9580 enum tree_code code = TREE_CODE (exp);
9581 /* Some cases need to create a label to jump to
9582 in order to properly fall through.
9583 These cases set DROP_THROUGH_LABEL nonzero. */
9584 rtx drop_through_label = 0;
9588 enum machine_mode mode;
9590 #ifdef MAX_INTEGER_COMPUTATION_MODE
9591 check_max_integer_computation_mode (exp);
9602 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9608 /* This is not true with #pragma weak */
9610 /* The address of something can never be zero. */
9612 emit_jump (if_true_label);
9617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9618 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9619 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9620 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
/* If we are narrowing the operand, we have to do the compare in the
   narrower mode.  */
9625 if ((TYPE_PRECISION (TREE_TYPE (exp))
9626 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9628 case NON_LVALUE_EXPR:
9629 case REFERENCE_EXPR:
9634 /* These cannot change zero->non-zero or vice versa. */
9635 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9638 case WITH_RECORD_EXPR:
9639 /* Put the object on the placeholder list, recurse through our first
9640 operand, and pop the list. */
9641 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9643 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9644 placeholder_list = TREE_CHAIN (placeholder_list);
/* This never takes fewer insns than evaluating the PLUS_EXPR followed by
   a test, and can take more if the test is eliminated.  */
9651 /* Reduce to minus. */
9652 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9653 TREE_OPERAND (exp, 0),
9654 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9655 TREE_OPERAND (exp, 1))));
9656 /* Process as MINUS. */
9660 /* Non-zero iff operands of minus differ. */
9661 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9662 TREE_OPERAND (exp, 0),
9663 TREE_OPERAND (exp, 1)),
9664 NE, NE, if_false_label, if_true_label);
9668 /* If we are AND'ing with a small constant, do this comparison in the
9669 smallest type that fits. If the machine doesn't have comparisons
9670 that small, it will be converted back to the wider comparison.
9671 This helps if we are testing the sign bit of a narrower object.
9672 combine can't do this for us because it can't know whether a
9673 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
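/* A worked example (illustrative): testing `x & 0x80' with x a 32-bit
   int gives i == 7, so mode_for_size (8, MODE_INT, 0) yields QImode; the
   test is then done on the operand converted to an 8-bit type, where it
   amounts to a sign-bit check of a QImode value.  */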
9675 if (! SLOW_BYTE_ACCESS
9676 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9677 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9678 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9679 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9680 && (type = type_for_mode (mode, 1)) != 0
9681 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9682 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9683 != CODE_FOR_nothing))
9685 do_jump (convert (type, exp), if_false_label, if_true_label);
9690 case TRUTH_NOT_EXPR:
9691 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9694 case TRUTH_ANDIF_EXPR:
9695 if (if_false_label == 0)
9696 if_false_label = drop_through_label = gen_label_rtx ();
9697 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9698 start_cleanup_deferral ();
9699 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9700 end_cleanup_deferral ();
9703 case TRUTH_ORIF_EXPR:
9704 if (if_true_label == 0)
9705 if_true_label = drop_through_label = gen_label_rtx ();
9706 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9707 start_cleanup_deferral ();
9708 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9709 end_cleanup_deferral ();
9714 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9715 preserve_temp_slots (NULL_RTX);
9719 do_pending_stack_adjust ();
9720 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9726 case ARRAY_RANGE_REF:
9728 HOST_WIDE_INT bitsize, bitpos;
9730 enum machine_mode mode;
9734 unsigned int alignment;
9736 /* Get description of this reference. We don't actually care
9737 about the underlying object here. */
9738 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9739 &unsignedp, &volatilep, &alignment);
9741 type = type_for_size (bitsize, unsignedp);
9742 if (! SLOW_BYTE_ACCESS
9743 && type != 0 && bitsize >= 0
9744 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9745 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9746 != CODE_FOR_nothing))
9748 do_jump (convert (type, exp), if_false_label, if_true_label);
9755 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9756 if (integer_onep (TREE_OPERAND (exp, 1))
9757 && integer_zerop (TREE_OPERAND (exp, 2)))
9758 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9760 else if (integer_zerop (TREE_OPERAND (exp, 1))
9761 && integer_onep (TREE_OPERAND (exp, 2)))
9762 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9766 rtx label1 = gen_label_rtx ();
9767 drop_through_label = gen_label_rtx ();
9769 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9771 start_cleanup_deferral ();
9772 /* Now the THEN-expression. */
9773 do_jump (TREE_OPERAND (exp, 1),
9774 if_false_label ? if_false_label : drop_through_label,
9775 if_true_label ? if_true_label : drop_through_label);
9776 /* In case the do_jump just above never jumps. */
9777 do_pending_stack_adjust ();
9778 emit_label (label1);
9780 /* Now the ELSE-expression. */
9781 do_jump (TREE_OPERAND (exp, 2),
9782 if_false_label ? if_false_label : drop_through_label,
9783 if_true_label ? if_true_label : drop_through_label);
9784 end_cleanup_deferral ();
9790 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9792 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9793 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9795 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9796 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9799 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9800 fold (build (EQ_EXPR, TREE_TYPE (exp),
9801 fold (build1 (REALPART_EXPR,
9802 TREE_TYPE (inner_type),
9804 fold (build1 (REALPART_EXPR,
9805 TREE_TYPE (inner_type),
9807 fold (build (EQ_EXPR, TREE_TYPE (exp),
9808 fold (build1 (IMAGPART_EXPR,
9809 TREE_TYPE (inner_type),
9811 fold (build1 (IMAGPART_EXPR,
9812 TREE_TYPE (inner_type),
9814 if_false_label, if_true_label);
9817 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9818 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9820 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9821 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9822 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9824 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9830 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9832 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9833 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9835 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9836 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9839 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9840 fold (build (NE_EXPR, TREE_TYPE (exp),
9841 fold (build1 (REALPART_EXPR,
9842 TREE_TYPE (inner_type),
9844 fold (build1 (REALPART_EXPR,
9845 TREE_TYPE (inner_type),
9847 fold (build (NE_EXPR, TREE_TYPE (exp),
9848 fold (build1 (IMAGPART_EXPR,
9849 TREE_TYPE (inner_type),
9851 fold (build1 (IMAGPART_EXPR,
9852 TREE_TYPE (inner_type),
9854 if_false_label, if_true_label);
9857 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9858 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9860 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9861 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9862 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9864 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9869 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9870 if (GET_MODE_CLASS (mode) == MODE_INT
9871 && ! can_compare_p (LT, mode, ccp_jump))
9872 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9874 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9878 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9879 if (GET_MODE_CLASS (mode) == MODE_INT
9880 && ! can_compare_p (LE, mode, ccp_jump))
9881 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9883 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9887 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9888 if (GET_MODE_CLASS (mode) == MODE_INT
9889 && ! can_compare_p (GT, mode, ccp_jump))
9890 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9892 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9896 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9897 if (GET_MODE_CLASS (mode) == MODE_INT
9898 && ! can_compare_p (GE, mode, ccp_jump))
9899 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9901 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9904 case UNORDERED_EXPR:
9907 enum rtx_code cmp, rcmp;
9910 if (code == UNORDERED_EXPR)
9911 cmp = UNORDERED, rcmp = ORDERED;
9913 cmp = ORDERED, rcmp = UNORDERED;
9914 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9917 if (! can_compare_p (cmp, mode, ccp_jump)
9918 && (can_compare_p (rcmp, mode, ccp_jump)
9919 /* If the target doesn't provide either UNORDERED or ORDERED
9920 comparisons, canonicalize on UNORDERED for the library. */
9921 || rcmp == UNORDERED))
9925 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9927 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9932 enum rtx_code rcode1;
9933 enum tree_code tcode2;
9957 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9958 if (can_compare_p (rcode1, mode, ccp_jump))
9959 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9963 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9964 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9967 /* If the target doesn't support combined unordered
9968 compares, decompose into UNORDERED + comparison. */
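/* For example (illustrative): UNLT_EXPR decomposes as
   UNORDERED (a, b) || LT (a, b); the TRUTH_ORIF_EXPR built below
   skips the LT when the operands compare unordered.  */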
9969 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9970 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9971 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9972 do_jump (exp, if_false_label, if_true_label);
/* Special-case the expressions

     __builtin_expect (<test>, 0) and
     __builtin_expect (<test>, 1)

   We need to do this here, so that <test> is not converted to a SCC
   operation on machines that use condition code registers and COMPARE
   like the PowerPC, and then the jump is done based on whether the SCC
   operation produced a 1 or 0.  */
9986 /* Check for a built-in function. */
9987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9989 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9990 tree arglist = TREE_OPERAND (exp, 1);
9992 if (TREE_CODE (fndecl) == FUNCTION_DECL
9993 && DECL_BUILT_IN (fndecl)
9994 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9995 && arglist != NULL_TREE
9996 && TREE_CHAIN (arglist) != NULL_TREE)
9998 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10001 if (seq != NULL_RTX)
10008 /* fall through and generate the normal code. */
10012 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* This is not needed any more and causes poor code since it causes
   comparisons and tests from non-SI objects to have different code
   sequences.  */
10017 /* Copy to register to avoid generating bad insns by cse
10018 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10019 if (!cse_not_expected && GET_CODE (temp) == MEM)
10020 temp = copy_to_reg (temp);
10022 do_pending_stack_adjust ();
10023 /* Do any postincrements in the expression that was tested. */
10026 if (GET_CODE (temp) == CONST_INT
10027 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10028 || GET_CODE (temp) == LABEL_REF)
10030 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10032 emit_jump (target);
10034 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10035 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10036 /* Note swapping the labels gives us not-equal. */
10037 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10038 else if (GET_MODE (temp) != VOIDmode)
10039 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10040 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10041 GET_MODE (temp), NULL_RTX, 0,
10042 if_false_label, if_true_label);
10047 if (drop_through_label)
10049 /* If do_jump produces code that might be jumped around,
10050 do any stack adjusts from that code, before the place
10051 where control merges in. */
10052 do_pending_stack_adjust ();
10053 emit_label (drop_through_label);
10057 /* Given a comparison expression EXP for values too wide to be compared
10058 with one insn, test the comparison and jump to the appropriate label.
10059 The code of EXP is ignored; we always test GT if SWAP is 0,
10060 and LT if SWAP is 1. */
10063 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10066 rtx if_false_label, if_true_label;
10068 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10069 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10070 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10071 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10073 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10076 /* Compare OP0 with OP1, word at a time, in mode MODE.
10077 UNSIGNEDP says to do unsigned comparison.
10078 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10081 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10082 enum machine_mode mode;
10085 rtx if_false_label, if_true_label;
10087 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10088 rtx drop_through_label = 0;
10091 if (! if_true_label || ! if_false_label)
10092 drop_through_label = gen_label_rtx ();
10093 if (! if_true_label)
10094 if_true_label = drop_through_label;
10095 if (! if_false_label)
10096 if_false_label = drop_through_label;
10098 /* Compare a word at a time, high order first. */
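/* For example (illustrative): for two DImode values on a 32-bit target
   the loop below emits

       if (op0.hi >  op1.hi) goto if_true;   - signed iff !UNSIGNEDP
       if (op0.hi != op1.hi) goto if_false;
       if (op0.lo >  op1.lo) goto if_true;   - lower words always unsigned
       if (op0.lo != op1.lo) goto if_false;
 */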
10099 for (i = 0; i < nwords; i++)
10101 rtx op0_word, op1_word;
10103 if (WORDS_BIG_ENDIAN)
10105 op0_word = operand_subword_force (op0, i, mode);
10106 op1_word = operand_subword_force (op1, i, mode);
10110 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10111 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
/* All but the high-order word must be compared as unsigned.  */
10115 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10116 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10117 NULL_RTX, if_true_label);
10119 /* Consider lower words only if these are equal. */
10120 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10121 NULL_RTX, 0, NULL_RTX, if_false_label);
10124 if (if_false_label)
10125 emit_jump (if_false_label);
10126 if (drop_through_label)
10127 emit_label (drop_through_label);

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
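
/* Editor's illustration (added comment, not original GCC source): equality
   by parts checks each word and bails out on the first mismatch, roughly

       if (W0 (a) != W0 (b)) goto if_false;
       if (W1 (a) != W1 (b)) goto if_false;
       goto if_true;

   with W0/W1 again standing in for operand_subword_force.  */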

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
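
/* Editor's illustration (added comment, not original GCC source): the
   preferred strategy above tests a multiword value against zero by OR-ing
   the words first, e.g. for a two-word value

       if ((LO (x) | HI (x)) == 0) goto if_true;  else goto if_false;

   which needs one compare instead of one compare per word.  */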

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
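
/* Editor's note (added comment, not original GCC source): the disabled
   block above would rewrite a signed equality against a constant into its
   zero-extended form, e.g. comparing a QImode value with -1 becomes a
   comparison with 0xff, since (signed char) x == -1 and
   (unsigned char) x == 0xff test the same bits.  */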

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
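
/* Editor's illustration (added comment, not original GCC source): the
   label-reversal at the top means a caller that only supplies
   IF_FALSE_LABEL still gets a single branch.  For integer `a < b' with no
   true label the code emits the equivalent of

       if (a >= b) goto if_false_label;   /* reverse_condition (LT) */

   rather than a branch-over-jump pair.  Floating-point modes are excluded
   because reversing LT to GE is not valid in the presence of NaNs.  */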

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
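
  /* Editor's illustration (added comment, not original GCC source): the
     single-bit path above turns, say, `(x & 8) != 0' into the equivalent
     of

	 t = (unsigned) x >> 3;    /* shift bit 3 to the bottom */
	 t = t & 1;                /* mask; omitted when testing the sign bit */

     and for EQ additionally `t ^= 1'.  The RSHIFT adjustment folds
     `((x >> 3) & 1) != 0' into a direct test of bit 3 of x.  */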

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
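
/* Editor's illustration (added comment, not original GCC source): the
   fallback emitted above computes the flag without an scc insn; for
   `a < b' with INVERT clear it is roughly

       target = 1;
       if (a < b) goto label;
       target = 0;
     label:

   i.e. the set/compare/jump/set simulation the comment at the top of
   this function describes.  */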

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
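
/* Editor's illustration (added comment, not original GCC source): when the
   switch index is wider than SImode, the code above checks the bounds in
   the original mode before truncating, roughly

       wide = index - minval;                  /* original wide mode */
       if (wide > range) goto default_label;   /* unsigned compare */
       narrow = (SImode) wide;                 /* now safe to truncate */

   so no out-of-range value can alias a valid table entry after the
   truncation.  */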

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
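
/* Editor's illustration (added comment, not original GCC source): the GTU
   test at the top of do_tablejump is the classic two-bounds-in-one-compare
   trick.  With the low bound already subtracted,

       if ((unsigned) (index - low) > (unsigned) (high - low))
	 goto default_label;

   rejects both index < low (which wraps to a huge unsigned value) and
   index > high in a single branch.  The dispatch address is then
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE).  */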

int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}