/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn	PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */
#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
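/* Illustrative example (not part of the original source): on a target
   with MOVE_MAX 4 and the default MOVE_RATIO of 15, a word-aligned
   16-byte copy costs move_by_pieces_ninsns (16, 32) = 4 SImode moves,
   so MOVE_BY_PIECES_P is true and the copy is expanded inline; a
   64-byte copy would cost 16 moves and would fall back to a movstr
   pattern or a library call instead.  */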
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
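/* Illustrative sketch (not part of the original source): the safe
   pattern is to protect an operand immediately before emitting the
   insn that uses it, e.g.

	op0 = protect_from_queue (op0, 0);
	op1 = protect_from_queue (op1, 0);
	emit_insn (gen_addsi3 (target, op0, op1));

   whereas calling protect_from_queue early, then emit_queue, and only
   then emitting the insn may pick up a stale value.  */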
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);

	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);

	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));

  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);

  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      /* Otherwise fall back on a library call.  */
      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
	libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
	libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
	libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
	libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
	libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
	libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
	libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
	libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
	libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
	libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */

      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
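/* Illustrative sketch (not part of the original source): a typical use
   of convert_move widens one pseudo into another, e.g.

	rtx narrow = gen_reg_rtx (QImode);
	rtx wide = gen_reg_rtx (SImode);
	convert_move (wide, narrow, 1);

   which emits a zero_extendqisi2 insn when the target provides one and
   otherwise falls back to the intermediate-mode or shift strategies
   above; passing 0 for UNSIGNEDP requests sign extension instead.  */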
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
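/* Worked example for the CONST_INT handling above (added for
   illustration): converting (const_int -1) from QImode to SImode gives
   WIDTH = 8, so VAL &= 0xff leaves 255; with UNSIGNEDP zero the sign
   bit 0x80 is set, so VAL |= -256 restores -1 and the result is
   GEN_INT (-1).  With UNSIGNEDP nonzero the result stays GEN_INT (255).  */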
/* This macro determines the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
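/* Worked example (added for illustration): with MOVE_MAX 4 and a
   word-aligned block, move_by_pieces_ninsns (10, 32) counts
   10/4 = 2 SImode moves (leaving 2 bytes), then 2/2 = 1 HImode move,
   for a total of 3 insns.  */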
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
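/* Illustrative sketch (not part of the original source): a caller
   copies one BLKmode object into another with something like

	emit_block_move (to_rtx, from_rtx, expr_size (exp));

   where both operands are BLKmode MEMs; the three strategies above
   (move_by_pieces, a movstr insn, memcpy/bcopy) are tried in order.  */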
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
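/* Illustrative sketch (not part of the original source; the register
   number is hypothetical): spreading an 8-byte DImode argument across
   two word registers starting at hard register 4 on a 32-bit target
   would be

	move_block_to_reg (4, arg_rtx, 2, DImode);

   which tries load_multiple first and otherwise emits one word-sized
   move per register.  */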
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   DST.  */

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   should be used.  */
void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     unsigned int align;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = change_address (mem, mode, XEXP (mem, 0));
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
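  /* Worked example (added for illustration): with 32-bit words, a
     6-byte structure leaves bytes % UNITS_PER_WORD == 2, so
     big_endian_correction = 32 - 2 * 8 = 16; the loop below then skips
     the 16 unused high-order bits of the first source word.  */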
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
2266 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2267 PARALLEL REGS. This is for calls that pass values in multiple
2268 non-contiguous locations. The Irix 6 ABI has examples of this. */
2271 use_group_regs (call_fusage, regs)
2277 for (i = 0; i < XVECLEN (regs, 0); i++)
2279 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2281 /* A NULL entry means the parameter goes both on the stack and in
2282 registers. This can also be a MEM for targets that pass values
2283 partially on the stack and partially in registers. */
2284 if (reg != 0 && GET_CODE (reg) == REG)
2285 use_reg (call_fusage, reg);
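/* Illustratively, REGS might look like (register numbers and modes
   made up):

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each inner EXPR_LIST pairs a register with its byte offset into
   the value; both registers would get a USE here.  */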
2291 can_store_by_pieces (len, constfun, constfundata, align)
2292 unsigned HOST_WIDE_INT len;
2293 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2297 unsigned HOST_WIDE_INT max_size, l;
2298 HOST_WIDE_INT offset = 0;
2299 enum machine_mode mode, tmode;
2300 enum insn_code icode;
2304 if (! MOVE_BY_PIECES_P (len, align))
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2308 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2309 align = MOVE_MAX * BITS_PER_UNIT;
2311 /* We would first store what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2315 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2320 max_size = MOVE_MAX_PIECES + 1;
2321 while (max_size > 1)
2323 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2324 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2325 if (GET_MODE_SIZE (tmode) < max_size)
2328 if (mode == VOIDmode)
2331 icode = mov_optab->handlers[(int) mode].insn_code;
2332 if (icode != CODE_FOR_nothing
2333 && align >= GET_MODE_ALIGNMENT (mode))
2335 unsigned int size = GET_MODE_SIZE (mode);
2342 cst = (*constfun) (constfundata, offset, mode);
2343 if (!LEGITIMATE_CONSTANT_P (cst))
2353 max_size = GET_MODE_SIZE (mode);
2356 /* The code above should have handled everything. */
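/* A minimal sketch (illustrative, not used in this file) of a CONSTFUN
   callback of the kind can_store_by_pieces and store_by_pieces expect:
   it reads GET_MODE_SIZE (MODE) bytes of a string passed as
   CONSTFUNDATA and assembles them into an immediate, respecting target
   byte order.  The name is made up and the caller must keep OFFSET in
   bounds.  */
#if 0
static rtx
example_read_str (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  const unsigned char *str = (const unsigned char *) data;
  HOST_WIDE_INT c = 0;
  unsigned int i;

  /* Accumulate the bytes in memory order into a host integer.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c |= (HOST_WIDE_INT) str[offset + i]
	 << (BYTES_BIG_ENDIAN
	     ? (GET_MODE_SIZE (mode) - 1 - i) * BITS_PER_UNIT
	     : i * BITS_PER_UNIT);

  return GEN_INT (trunc_int_for_mode (c, mode));
}
#endif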
2364 /* Generate several move instructions to store LEN bytes generated by
2365 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2366 pointer which will be passed as argument in every CONSTFUN call.
2367 ALIGN is maximum alignment we can assume. */
2370 store_by_pieces (to, len, constfun, constfundata, align)
2372 unsigned HOST_WIDE_INT len;
2373 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2377 struct store_by_pieces data;
2379 if (! MOVE_BY_PIECES_P (len, align))
2381 to = protect_from_queue (to, 1);
2382 data.constfun = constfun;
2383 data.constfundata = constfundata;
2386 store_by_pieces_1 (&data, align);
2389 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2390 rtx with BLKmode). The caller must pass TO through protect_from_queue
2391 before calling. ALIGN is maximum alignment we can assume. */
2394 clear_by_pieces (to, len, align)
2396 unsigned HOST_WIDE_INT len;
2399 struct store_by_pieces data;
2401 data.constfun = clear_by_pieces_1;
2402 data.constfundata = NULL;
2405 store_by_pieces_1 (&data, align);
2408 /* Callback routine for clear_by_pieces.
2409 Return const0_rtx unconditionally. */
2412 clear_by_pieces_1 (data, offset, mode)
2413 PTR data ATTRIBUTE_UNUSED;
2414 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2415 enum machine_mode mode ATTRIBUTE_UNUSED;
2420 /* Subroutine of clear_by_pieces and store_by_pieces.
2421 Generate several move instructions to store LEN bytes of block TO. (A MEM
2422 rtx with BLKmode). The caller must pass TO through protect_from_queue
2423 before calling. ALIGN is maximum alignment we can assume. */
2426 store_by_pieces_1 (data, align)
2427 struct store_by_pieces *data;
2430 rtx to_addr = XEXP (data->to, 0);
2431 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2432 enum machine_mode mode = VOIDmode, tmode;
2433 enum insn_code icode;
2436 data->to_addr = to_addr;
2438 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2439 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2441 data->explicit_inc_to = 0;
2443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2445 data->offset = data->len;
2447 /* If storing requires more than two move insns,
2448 copy addresses to registers (to make displacements shorter)
2449 and use post-increment if available. */
2450 if (!data->autinc_to
2451 && move_by_pieces_ninsns (data->len, align) > 2)
2453 /* Determine the main mode we'll be using. */
2454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 if (GET_MODE_SIZE (tmode) < max_size)
2459 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = -1;
2466 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2467 && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = 1;
2474 if (!data->autinc_to && CONSTANT_P (to_addr))
2475 data->to_addr = copy_addr_to_reg (to_addr);
2478 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2479 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2480 align = MOVE_MAX * BITS_PER_UNIT;
2482 /* First store what we can in the largest integer mode, then go to
2483 successively smaller modes. */
2485 while (max_size > 1)
2487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2489 if (GET_MODE_SIZE (tmode) < max_size)
2492 if (mode == VOIDmode)
2495 icode = mov_optab->handlers[(int) mode].insn_code;
2496 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2497 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2499 max_size = GET_MODE_SIZE (mode);
2502 /* The code above should have handled everything. */
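/* Worked example (illustrative): clearing 7 bytes of a 32-bit-aligned
   block on a 32-bit target stores one SImode zero at offset 0, one
   HImode zero at offset 4, and one QImode zero at offset 6, as the
   loop above walks from the widest usable integer mode down.  */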
2507 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2508 with move instructions for mode MODE. GENFUN is the gen_... function
2509 to make a move insn for that mode. DATA has all the other info. */
2512 store_by_pieces_2 (genfun, mode, data)
2513 rtx (*genfun) PARAMS ((rtx, ...));
2514 enum machine_mode mode;
2515 struct store_by_pieces *data;
2517 unsigned int size = GET_MODE_SIZE (mode);
2520 while (data->len >= size)
2523 data->offset -= size;
2525 if (data->autinc_to)
2527 to1 = replace_equiv_address (data->to, data->to_addr);
2528 to1 = adjust_address (to1, mode, 0);
2531 to1 = adjust_address (data->to, mode, data->offset);
2533 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2534 emit_insn (gen_add2_insn (data->to_addr,
2535 GEN_INT (-(HOST_WIDE_INT) size)));
2537 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2538 emit_insn ((*genfun) (to1, cst));
2540 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2543 if (! data->reverse)
2544 data->offset += size;
2550 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2551 its length in bytes. */
2554 clear_storage (object, size)
2558 #ifdef TARGET_MEM_FUNCTIONS
2560 tree call_expr, arg_list;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (GET_CODE (size) == CONST_INT
2578 && MOVE_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2582 /* Try the most limited insn first, because there's no point
2583 including more than one in the machine description unless
2584 the more limited one has some advantage. */
2586 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2587 enum machine_mode mode;
2589 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2590 mode = GET_MODE_WIDER_MODE (mode))
2592 enum insn_code code = clrstr_optab[(int) mode];
2593 insn_operand_predicate_fn pred;
2595 if (code != CODE_FOR_nothing
2596 /* We don't need MODE to be narrower than
2597 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2598 the mode mask, as it is returned by the macro, it will
2599 definitely be less than the actual mode mask. */
2600 && ((GET_CODE (size) == CONST_INT
2601 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2602 <= (GET_MODE_MASK (mode) >> 1)))
2603 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2604 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2605 || (*pred) (object, BLKmode))
2606 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2607 || (*pred) (opalign, VOIDmode)))
2610 rtx last = get_last_insn ();
2613 op1 = convert_to_mode (mode, size, 1);
2614 pred = insn_data[(int) code].operand[1].predicate;
2615 if (pred != 0 && ! (*pred) (op1, mode))
2616 op1 = copy_to_mode_reg (mode, op1);
2618 pat = GEN_FCN ((int) code) (object, op1, opalign);
2625 delete_insns_since (last);
2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2649 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2654 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2655 TREE_UNSIGNED (integer_type_node));
2656 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2659 #ifdef TARGET_MEM_FUNCTIONS
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context.
2663 This could be a user call to memset and the user may wish to
2664 examine the return value from memset.
2666 For targets where libcalls and normal calls have different
2667 conventions for returning pointers, we could end up generating incorrect code.
2670 So instead of using a libcall sequence we build up a suitable
2671 CALL_EXPR and expand the call in the normal fashion. */
2672 if (fn == NULL_TREE)
2676 /* This was copied from except.c; I don't know whether all of it is
2677 necessary in this context. */
2678 fn = get_identifier ("memset");
2679 fntype = build_pointer_type (void_type_node);
2680 fntype = build_function_type (fntype, NULL_TREE);
2681 fn = build_decl (FUNCTION_DECL, fn, fntype);
2682 ggc_add_tree_root (&fn, 1);
2683 DECL_EXTERNAL (fn) = 1;
2684 TREE_PUBLIC (fn) = 1;
2685 DECL_ARTIFICIAL (fn) = 1;
2686 TREE_NOTHROW (fn) = 1;
2687 make_decl_rtl (fn, NULL);
2688 assemble_external (fn);
2691 /* We need to make an argument list for the function call.
2693 memset has three arguments: the first is a void * address, the
2694 second an integer with the initialization value, and the last a
2695 size_t byte count for the copy. */
2697 = build_tree_list (NULL_TREE,
2698 make_tree (build_pointer_type (void_type_node),
2700 TREE_CHAIN (arg_list)
2701 = build_tree_list (NULL_TREE,
2702 make_tree (integer_type_node, const0_rtx));
2703 TREE_CHAIN (TREE_CHAIN (arg_list))
2704 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2705 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
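/* The tree built above corresponds to the C call

	(void) memset ((void *) object, 0, size);

   with the constant zero supplied as const0_rtx.  */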
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR,
2709 build_pointer_type (TREE_TYPE (fn)), fn);
2710 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2711 call_expr, arg_list, NULL_TREE);
2712 TREE_SIDE_EFFECTS (call_expr) = 1;
2714 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2716 emit_library_call (bzero_libfunc, LCT_NORMAL,
2717 VOIDmode, 2, object, Pmode, size,
2718 TYPE_MODE (integer_type_node));
2726 /* Generate code to copy Y into X.
2727 Both Y and X must have the same mode, except that
2728 Y can be a constant with VOIDmode.
2729 This mode cannot be BLKmode; use emit_block_move for that.
2731 Return the last instruction emitted. */
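/* A minimal usage sketch (illustrative): to load a constant into a
   fresh SImode pseudo one would write

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   Here Y has VOIDmode, which the constant exception above permits.  */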
2734 emit_move_insn (x, y)
2737 enum machine_mode mode = GET_MODE (x);
2738 rtx y_cst = NULL_RTX;
2741 x = protect_from_queue (x, 1);
2742 y = protect_from_queue (y, 0);
2744 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2747 /* Never force constant_p_rtx to memory. */
2748 if (GET_CODE (y) == CONSTANT_P_RTX)
2750 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2753 y = force_const_mem (mode, y);
2756 /* If X or Y are memory references, verify that their addresses are valid
2758 if (GET_CODE (x) == MEM
2759 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2760 && ! push_operand (x, GET_MODE (x)))
2762 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2763 x = validize_mem (x);
2765 if (GET_CODE (y) == MEM
2766 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2768 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2769 y = validize_mem (y);
2771 if (mode == BLKmode)
2774 last_insn = emit_move_insn_1 (x, y);
2776 if (y_cst && GET_CODE (x) == REG)
2777 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2782 /* Low level part of emit_move_insn.
2783 Called just like emit_move_insn, but assumes X and Y
2784 are basically valid. */
2787 emit_move_insn_1 (x, y)
2790 enum machine_mode mode = GET_MODE (x);
2791 enum machine_mode submode;
2792 enum mode_class class = GET_MODE_CLASS (mode);
2795 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2798 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2800 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2802 /* Expand complex moves by moving real part and imag part, if possible. */
2803 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2804 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2806 (class == MODE_COMPLEX_INT
2807 ? MODE_INT : MODE_FLOAT),
2809 && (mov_optab->handlers[(int) submode].insn_code
2810 != CODE_FOR_nothing))
2812 /* Don't split destination if it is a stack push. */
2813 int stack = push_operand (x, GET_MODE (x));
2815 #ifdef PUSH_ROUNDING
2816 /* In case we output to the stack, but the size is smaller than the
2817 machine can push exactly, we need to use move instructions. */
2819 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2822 int offset1, offset2;
2824 /* Do not use anti_adjust_stack, since we don't want to update
2825 stack_pointer_delta. */
2826 temp = expand_binop (Pmode,
2827 #ifdef STACK_GROWS_DOWNWARD
2834 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2838 if (temp != stack_pointer_rtx)
2839 emit_move_insn (stack_pointer_rtx, temp);
2840 #ifdef STACK_GROWS_DOWNWARD
2842 offset2 = GET_MODE_SIZE (submode);
2844 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2845 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2846 + GET_MODE_SIZE (submode));
2848 emit_move_insn (change_address (x, submode,
2849 gen_rtx_PLUS (Pmode,
2851 GEN_INT (offset1))),
2852 gen_realpart (submode, y));
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2856 GEN_INT (offset2))),
2857 gen_imagpart (submode, y));
2861 /* If this is a stack push, push the highpart first, so it
2862 will end up in the argument order.
2864 In that case, change_address is used only to convert
2865 the mode, not to change the address. */
2868 /* Note that the real part always precedes the imag part in memory
2869 regardless of machine's endianness. */
2870 #ifdef STACK_GROWS_DOWNWARD
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_imagpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2878 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2879 (gen_rtx_MEM (submode, XEXP (x, 0)),
2880 gen_realpart (submode, y)));
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_imagpart (submode, y)));
2888 rtx realpart_x, realpart_y;
2889 rtx imagpart_x, imagpart_y;
2891 /* If this is a complex value with each part being smaller than a
2892 word, the usual calling sequence will likely pack the pieces into
2893 a single register. Unfortunately, SUBREG of hard registers only
2894 deals in terms of words, so we have a problem converting input
2895 arguments to the CONCAT of two registers that is used elsewhere
2896 for complex values. If this is before reload, we can copy it into
2897 memory and reload. FIXME, we should see about using extract and
2898 insert on integer registers, but complex short and complex char
2899 variables should be rarely used. */
2900 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2901 && (reload_in_progress | reload_completed) == 0)
2903 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2904 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2906 if (packed_dest_p || packed_src_p)
2908 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2909 ? MODE_FLOAT : MODE_INT);
2911 enum machine_mode reg_mode
2912 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2914 if (reg_mode != BLKmode)
2916 rtx mem = assign_stack_temp (reg_mode,
2917 GET_MODE_SIZE (mode), 0);
2918 rtx cmem = adjust_address (mem, mode, 0);
2921 = N_("function using short complex types cannot be inline");
2925 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2926 emit_move_insn_1 (cmem, y);
2927 return emit_move_insn_1 (sreg, mem);
2931 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2932 emit_move_insn_1 (mem, sreg);
2933 return emit_move_insn_1 (x, cmem);
2939 realpart_x = gen_realpart (submode, x);
2940 realpart_y = gen_realpart (submode, y);
2941 imagpart_x = gen_imagpart (submode, x);
2942 imagpart_y = gen_imagpart (submode, y);
2944 /* Show the output dies here. This is necessary for SUBREGs
2945 of pseudos since we cannot track their lifetimes correctly;
2946 hard regs shouldn't appear here except as return values.
2947 We never want to emit such a clobber after reload. */
2949 && ! (reload_in_progress || reload_completed)
2950 && (GET_CODE (realpart_x) == SUBREG
2951 || GET_CODE (imagpart_x) == SUBREG))
2953 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (realpart_x, realpart_y));
2958 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2959 (imagpart_x, imagpart_y));
2962 return get_last_insn ();
2965 /* This will handle any multi-word mode that lacks a move_insn pattern.
2966 However, you will get better code if you define such patterns,
2967 even if they must turn into multiple assembler instructions. */
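/* For example, a DImode move on a 32-bit target falls into this case
   when no movdi pattern exists: it becomes two SImode word moves via
   operand_subword, wrapped in a sequence that may begin with a CLOBBER
   of the destination (see below).  */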
2968 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2974 #ifdef PUSH_ROUNDING
2976 /* If X is a push on the stack, do the push now and replace
2977 X with a reference to the stack pointer. */
2978 if (push_operand (x, GET_MODE (x)))
2983 /* Do not use anti_adjust_stack, since we don't want to update
2984 stack_pointer_delta. */
2985 temp = expand_binop (Pmode,
2986 #ifdef STACK_GROWS_DOWNWARD
2993 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2997 if (temp != stack_pointer_rtx)
2998 emit_move_insn (stack_pointer_rtx, temp);
3000 code = GET_CODE (XEXP (x, 0));
3001 /* Just hope that small offsets off SP are OK. */
3002 if (code == POST_INC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (-(HOST_WIDE_INT)
3005 GET_MODE_SIZE (GET_MODE (x))));
3006 else if (code == POST_DEC)
3007 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3008 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3010 temp = stack_pointer_rtx;
3012 x = change_address (x, VOIDmode, temp);
3016 /* If we are in reload, see if either operand is a MEM whose address
3017 is scheduled for replacement. */
3018 if (reload_in_progress && GET_CODE (x) == MEM
3019 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3020 x = replace_equiv_address_nv (x, inner);
3021 if (reload_in_progress && GET_CODE (y) == MEM
3022 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3023 y = replace_equiv_address_nv (y, inner);
3029 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3032 rtx xpart = operand_subword (x, i, 1, mode);
3033 rtx ypart = operand_subword (y, i, 1, mode);
3035 /* If we can't get a part of Y, put Y into memory if it is a
3036 constant. Otherwise, force it into a register. If we still
3037 can't get a part of Y, abort. */
3038 if (ypart == 0 && CONSTANT_P (y))
3040 y = force_const_mem (mode, y);
3041 ypart = operand_subword (y, i, 1, mode);
3043 else if (ypart == 0)
3044 ypart = operand_subword_force (y, i, mode);
3046 if (xpart == 0 || ypart == 0)
3049 need_clobber |= (GET_CODE (xpart) == SUBREG);
3051 last_insn = emit_move_insn (xpart, ypart);
3054 seq = gen_sequence ();
3057 /* Show the output dies here. This is necessary for SUBREGs
3058 of pseudos since we cannot track their lifetimes correctly;
3059 hard regs shouldn't appear here except as return values.
3060 We never want to emit such a clobber after reload. */
3062 && ! (reload_in_progress || reload_completed)
3063 && need_clobber != 0)
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3076 /* Pushing data onto the stack. */
3078 /* Push a block of length SIZE (perhaps variable)
3079 and return an rtx to address the beginning of the block.
3080 Note that it is not possible for the value returned to be a QUEUED.
3081 The value may be virtual_outgoing_args_rtx.
3083 EXTRA is the number of bytes of padding to push in addition to SIZE.
3084 BELOW nonzero means this padding comes at low addresses;
3085 otherwise, the padding comes at high addresses. */
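/* For example (constant size, no padding), push_block (GEN_INT (32), 0, 0)
   anti-adjusts the stack by 32 bytes and returns an address rtx for the
   start of the freshly allocated block.  */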
3088 push_block (size, extra, below)
3094 size = convert_modes (Pmode, ptr_mode, size, 1);
3095 if (CONSTANT_P (size))
3096 anti_adjust_stack (plus_constant (size, extra));
3097 else if (GET_CODE (size) == REG && extra == 0)
3098 anti_adjust_stack (size);
3101 temp = copy_to_mode_reg (Pmode, size);
3103 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3104 temp, 0, OPTAB_LIB_WIDEN);
3105 anti_adjust_stack (temp);
3108 #ifndef STACK_GROWS_DOWNWARD
3114 temp = virtual_outgoing_args_rtx;
3115 if (extra != 0 && below)
3116 temp = plus_constant (temp, extra);
3120 if (GET_CODE (size) == CONST_INT)
3121 temp = plus_constant (virtual_outgoing_args_rtx,
3122 -INTVAL (size) - (below ? 0 : extra));
3123 else if (extra != 0 && !below)
3124 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3125 negate_rtx (Pmode, plus_constant (size, extra)));
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3128 negate_rtx (Pmode, size));
3131 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3135 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3136 block of SIZE bytes. */
3139 get_push_address (size)
3144 if (STACK_PUSH_CODE == POST_DEC)
3145 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3146 else if (STACK_PUSH_CODE == POST_INC)
3147 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3149 temp = stack_pointer_rtx;
3151 return copy_to_reg (temp);
3154 #ifdef PUSH_ROUNDING
3156 /* Emit single push insn. */
3159 emit_single_push_insn (mode, x, type)
3161 enum machine_mode mode;
3165 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3167 enum insn_code icode;
3168 insn_operand_predicate_fn pred;
3170 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3171 /* If there is a push pattern, use it. Otherwise try the old way of
3172 handing a MEM representing the push operation to the move expander. */
3173 icode = push_optab->handlers[(int) mode].insn_code;
3174 if (icode != CODE_FOR_nothing)
3176 if (((pred = insn_data[(int) icode].operand[0].predicate)
3177 && !((*pred) (x, mode))))
3178 x = force_reg (mode, x);
3179 emit_insn (GEN_FCN (icode) (x));
3182 if (GET_MODE_SIZE (mode) == rounded_size)
3183 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3186 #ifdef STACK_GROWS_DOWNWARD
3187 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3188 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3190 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3191 GEN_INT (rounded_size));
3193 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
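/* For example, pushing an HImode value when PUSH_ROUNDING rounds
   2 bytes up to 4 on a downward-growing stack yields the address

	(pre_modify sp (plus sp (const_int -4)))

   so the stack pointer moves by the rounded size while only 2 bytes
   are stored.  */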
3196 dest = gen_rtx_MEM (mode, dest_addr);
3200 set_mem_attributes (dest, type, 1);
3201 /* Function incoming arguments may overlap with sibling call
3202 outgoing arguments and we cannot allow reordering of reads
3203 from function arguments with stores to outgoing arguments
3204 of sibling calls. */
3205 set_mem_alias_set (dest, 0);
3207 emit_move_insn (dest, x);
3211 /* Generate code to push X onto the stack, assuming it has mode MODE and
3212 type TYPE.
3213 MODE is redundant except when X is a CONST_INT (since they don't
3214 carry mode info).
3215 SIZE is an rtx for the size of data to be copied (in bytes),
3216 needed only if X is BLKmode.
3218 ALIGN (in bits) is maximum alignment we can assume.
3220 If PARTIAL and REG are both nonzero, then copy that many of the first
3221 words of X into registers starting with REG, and push the rest of X.
3222 The amount of space pushed is decreased by PARTIAL words,
3223 rounded *down* to a multiple of PARM_BOUNDARY.
3224 REG must be a hard register in this case.
3225 If REG is zero but PARTIAL is not, take all other actions for an
3226 argument partially in registers, but do not actually load any registers.
3229 EXTRA is the amount in bytes of extra space to leave next to this arg.
3230 This is ignored if an argument block has already been allocated.
3232 On a machine that lacks real push insns, ARGS_ADDR is the address of
3233 the bottom of the argument block for this call. We use indexing off there
3234 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3235 argument block has not been preallocated.
3237 ARGS_SO_FAR is the size of args previously pushed for this call.
3239 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3240 for arguments passed in registers. If nonzero, it will be the number
3241 of bytes required. */
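/* Worked example for the PARTIAL rounding rule above (illustrative):
   with PARM_BOUNDARY == 64 and 32-bit words, PARTIAL == 1 (one word in
   registers) rounds down to zero, so no stack space is saved even
   though the first word is not stored.  */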
3244 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3245 args_addr, args_so_far, reg_parm_stack_space,
3248 enum machine_mode mode;
3257 int reg_parm_stack_space;
3261 enum direction stack_direction
3262 #ifdef STACK_GROWS_DOWNWARD
3268 /* Decide where to pad the argument: `downward' for below,
3269 `upward' for above, or `none' for don't pad it.
3270 Default is below for small data on big-endian machines; else above. */
3271 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3273 /* Invert direction if stack is post-decrement.
3275 if (STACK_PUSH_CODE == POST_DEC)
3276 if (where_pad != none)
3277 where_pad = (where_pad == downward ? upward : downward);
3279 xinner = x = protect_from_queue (x, 0);
3281 if (mode == BLKmode)
3283 /* Copy a block into the stack, entirely or partially. */
3286 int used = partial * UNITS_PER_WORD;
3287 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3295 /* USED is now the # of bytes we need not copy to the stack
3296 because registers will take care of them. */
3299 xinner = adjust_address (xinner, BLKmode, used);
3301 /* If the partial register-part of the arg counts in its stack size,
3302 skip the part of stack space corresponding to the registers.
3303 Otherwise, start copying to the beginning of the stack space,
3304 by setting SKIP to 0. */
3305 skip = (reg_parm_stack_space == 0) ? 0 : used;
3307 #ifdef PUSH_ROUNDING
3308 /* Do it with several push insns if that doesn't take lots of insns
3309 and if there is no difficulty with push insns that skip bytes
3310 on the stack for alignment purposes. */
3313 && GET_CODE (size) == CONST_INT
3315 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3316 /* Here we avoid the case of a structure whose weak alignment
3317 forces many pushes of a small amount of data,
3318 and such small pushes do rounding that causes trouble. */
3319 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3320 || align >= BIGGEST_ALIGNMENT
3321 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3322 == (align / BITS_PER_UNIT)))
3323 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3325 /* Push padding now if padding above and stack grows down,
3326 or if padding below and stack grows up.
3327 But if space already allocated, this has already been done. */
3328 if (extra && args_addr == 0
3329 && where_pad != none && where_pad != stack_direction)
3330 anti_adjust_stack (GEN_INT (extra));
3332 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3334 if (current_function_check_memory_usage && ! in_check_memory_usage)
3338 in_check_memory_usage = 1;
3339 temp = get_push_address (INTVAL (size) - used);
3340 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3341 emit_library_call (chkr_copy_bitmap_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3343 Pmode, XEXP (xinner, 0), Pmode,
3344 GEN_INT (INTVAL (size) - used),
3345 TYPE_MODE (sizetype));
3347 emit_library_call (chkr_set_right_libfunc,
3348 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3349 Pmode, GEN_INT (INTVAL (size) - used),
3350 TYPE_MODE (sizetype),
3351 GEN_INT (MEMORY_USE_RW),
3352 TYPE_MODE (integer_type_node));
3353 in_check_memory_usage = 0;
3357 #endif /* PUSH_ROUNDING */
3361 /* Otherwise make space on the stack and copy the data
3362 to the address of that space. */
3364 /* Deduct words put into registers from the size we must copy. */
3367 if (GET_CODE (size) == CONST_INT)
3368 size = GEN_INT (INTVAL (size) - used);
3370 size = expand_binop (GET_MODE (size), sub_optab, size,
3371 GEN_INT (used), NULL_RTX, 0,
3375 /* Get the address of the stack space.
3376 In this case, we do not deal with EXTRA separately.
3377 A single stack adjust will do. */
3380 temp = push_block (size, extra, where_pad == downward);
3383 else if (GET_CODE (args_so_far) == CONST_INT)
3384 temp = memory_address (BLKmode,
3385 plus_constant (args_addr,
3386 skip + INTVAL (args_so_far)));
3388 temp = memory_address (BLKmode,
3389 plus_constant (gen_rtx_PLUS (Pmode,
3393 if (current_function_check_memory_usage && ! in_check_memory_usage)
3395 in_check_memory_usage = 1;
3396 target = copy_to_reg (temp);
3397 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3398 emit_library_call (chkr_copy_bitmap_libfunc,
3399 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3401 XEXP (xinner, 0), Pmode,
3402 size, TYPE_MODE (sizetype));
3404 emit_library_call (chkr_set_right_libfunc,
3405 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3407 size, TYPE_MODE (sizetype),
3408 GEN_INT (MEMORY_USE_RW),
3409 TYPE_MODE (integer_type_node));
3410 in_check_memory_usage = 0;
3413 target = gen_rtx_MEM (BLKmode, temp);
3417 set_mem_attributes (target, type, 1);
3418 /* Function incoming arguments may overlap with sibling call
3419 outgoing arguments and we cannot allow reordering of reads
3420 from function arguments with stores to outgoing arguments
3421 of sibling calls. */
3422 set_mem_alias_set (target, 0);
3425 set_mem_align (target, align);
3427 /* TEMP is the address of the block. Copy the data there. */
3428 if (GET_CODE (size) == CONST_INT
3429 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3431 move_by_pieces (target, xinner, INTVAL (size), align);
3436 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3437 enum machine_mode mode;
3439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3441 mode = GET_MODE_WIDER_MODE (mode))
3443 enum insn_code code = movstr_optab[(int) mode];
3444 insn_operand_predicate_fn pred;
3446 if (code != CODE_FOR_nothing
3447 && ((GET_CODE (size) == CONST_INT
3448 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3449 <= (GET_MODE_MASK (mode) >> 1)))
3450 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3451 && (!(pred = insn_data[(int) code].operand[0].predicate)
3452 || ((*pred) (target, BLKmode)))
3453 && (!(pred = insn_data[(int) code].operand[1].predicate)
3454 || ((*pred) (xinner, BLKmode)))
3455 && (!(pred = insn_data[(int) code].operand[3].predicate)
3456 || ((*pred) (opalign, VOIDmode))))
3458 rtx op2 = convert_to_mode (mode, size, 1);
3459 rtx last = get_last_insn ();
3462 pred = insn_data[(int) code].operand[2].predicate;
3463 if (pred != 0 && ! (*pred) (op2, mode))
3464 op2 = copy_to_mode_reg (mode, op2);
3466 pat = GEN_FCN ((int) code) (target, xinner,
3474 delete_insns_since (last);
3479 if (!ACCUMULATE_OUTGOING_ARGS)
3481 /* If the source is referenced relative to the stack pointer,
3482 copy it to another register to stabilize it. We do not need
3483 to do this if we know that we won't be changing sp. */
3485 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3486 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3487 temp = copy_to_reg (temp);
3490 /* Make inhibit_defer_pop nonzero around the library call
3491 to force it to pop the bcopy-arguments right away. */
3493 #ifdef TARGET_MEM_FUNCTIONS
3494 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3495 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3496 convert_to_mode (TYPE_MODE (sizetype),
3497 size, TREE_UNSIGNED (sizetype)),
3498 TYPE_MODE (sizetype));
3500 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3501 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3502 convert_to_mode (TYPE_MODE (integer_type_node),
3504 TREE_UNSIGNED (integer_type_node)),
3505 TYPE_MODE (integer_type_node));
3510 else if (partial > 0)
3512 /* Scalar partly in registers. */
3514 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3517 /* # words of start of argument
3518 that we must make space for but need not store. */
3519 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3520 int args_offset = INTVAL (args_so_far);
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra && args_addr == 0
3527 && where_pad != none && where_pad != stack_direction)
3528 anti_adjust_stack (GEN_INT (extra));
3530 /* If we make space by pushing it, we might as well push
3531 the real data. Otherwise, we can leave OFFSET nonzero
3532 and leave the space uninitialized. */
3536 /* Now NOT_STACK gets the number of words that we don't need to
3537 allocate on the stack. */
3538 not_stack = partial - offset;
3540 /* If the partial register-part of the arg counts in its stack size,
3541 skip the part of stack space corresponding to the registers.
3542 Otherwise, start copying to the beginning of the stack space,
3543 by setting SKIP to 0. */
3544 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3546 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3547 x = validize_mem (force_const_mem (mode, x));
3549 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3550 SUBREGs of such registers are not allowed. */
3551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3552 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3553 x = copy_to_reg (x);
3555 /* Loop over all the words allocated on the stack for this arg. */
3556 /* We can do it by words, because any scalar bigger than a word
3557 has a size that is a multiple of a word. */
3558 #ifndef PUSH_ARGS_REVERSED
3559 for (i = not_stack; i < size; i++)
3561 for (i = size - 1; i >= not_stack; i--)
3563 if (i >= not_stack + offset)
3564 emit_push_insn (operand_subword_force (x, i, mode),
3565 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3567 GEN_INT (args_offset + ((i - not_stack + skip)
3569 reg_parm_stack_space, alignment_pad);
3574 rtx target = NULL_RTX;
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 #ifdef PUSH_ROUNDING
3585 if (args_addr == 0 && PUSH_ARGS)
3586 emit_single_push_insn (mode, x, type);
3590 if (GET_CODE (args_so_far) == CONST_INT)
3592 = memory_address (mode,
3593 plus_constant (args_addr,
3594 INTVAL (args_so_far)));
3596 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3599 dest = gen_rtx_MEM (mode, addr);
3602 set_mem_attributes (dest, type, 1);
3603 /* Function incoming arguments may overlap with sibling call
3604 outgoing arguments and we cannot allow reordering of reads
3605 from function arguments with stores to outgoing arguments
3606 of sibling calls. */
3607 set_mem_alias_set (dest, 0);
3610 emit_move_insn (dest, x);
3614 if (current_function_check_memory_usage && ! in_check_memory_usage)
3616 in_check_memory_usage = 1;
3618 target = get_push_address (GET_MODE_SIZE (mode));
3620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3621 emit_library_call (chkr_copy_bitmap_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, XEXP (x, 0), Pmode,
3624 GEN_INT (GET_MODE_SIZE (mode)),
3625 TYPE_MODE (sizetype));
3627 emit_library_call (chkr_set_right_libfunc,
3628 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3629 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3630 TYPE_MODE (sizetype),
3631 GEN_INT (MEMORY_USE_RW),
3632 TYPE_MODE (integer_type_node));
3633 in_check_memory_usage = 0;
3638 /* If part should go in registers, copy that part
3639 into the appropriate registers. Do this now, at the end,
3640 since mem-to-mem copies above may do function calls. */
3641 if (partial > 0 && reg != 0)
3643 /* Handle calls that pass values in multiple non-contiguous locations.
3644 The Irix 6 ABI has examples of this. */
3645 if (GET_CODE (reg) == PARALLEL)
3646 emit_group_load (reg, x, -1, align); /* ??? size? */
3648 move_block_to_reg (REGNO (reg), x, partial, mode);
3651 if (extra && args_addr == 0 && where_pad == stack_direction)
3652 anti_adjust_stack (GEN_INT (extra));
3654 if (alignment_pad && args_addr == 0)
3655 anti_adjust_stack (alignment_pad);
3658 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3666 /* Only registers can be subtargets. */
3667 || GET_CODE (x) != REG
3668 /* If the register is readonly, it can't be set more than once. */
3669 || RTX_UNCHANGING_P (x)
3670 /* Don't use hard regs to avoid extending their life. */
3671 || REGNO (x) < FIRST_PSEUDO_REGISTER
3672 /* Avoid subtargets inside loops,
3673 since they hide some invariant expressions. */
3674 || preserve_subexpressions_p ())
3678 /* Expand an assignment that stores the value of FROM into TO.
3679 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3680 (This may contain a QUEUED rtx;
3681 if the value is constant, this rtx is a constant.)
3682 Otherwise, the returned value is NULL_RTX.
3684 SUGGEST_REG is no longer actually used.
3685 It used to mean, copy the value through a register
3686 and return that register, if that is possible.
3687 We now use WANT_VALUE to decide whether to do this. */
3690 expand_assignment (to, from, want_value, suggest_reg)
3693 int suggest_reg ATTRIBUTE_UNUSED;
3698 /* Don't crash if the lhs of the assignment was erroneous. */
3700 if (TREE_CODE (to) == ERROR_MARK)
3702 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3703 return want_value ? result : NULL_RTX;
3706 /* Assignment of a structure component needs special treatment
3707 if the structure component's rtx is not simply a MEM.
3708 Assignment of an array element at a constant index, and assignment of
3709 an array element in an unaligned packed structure field, has the same
3712 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3713 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3715 enum machine_mode mode1;
3716 HOST_WIDE_INT bitsize, bitpos;
3721 unsigned int alignment;
3724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3725 &unsignedp, &volatilep, &alignment);
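/* For example (assuming a typical bit-field layout), for

	struct { char c; int f : 7; } x;  ...  x.f = v;

   get_inner_reference would return TEM == x with BITSIZE == 7,
   BITPOS == 8, and MODE1 == VOIDmode, the latter signalling that
   store_bit_field must be used.  */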
3727 /* If we are going to use store_bit_field and extract_bit_field,
3728 make sure to_rtx will be safe for multiple use. */
3730 if (mode1 == VOIDmode && want_value)
3731 tem = stabilize_reference (tem);
3733 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3738 if (GET_CODE (to_rtx) != MEM)
3741 if (GET_MODE (offset_rtx) != ptr_mode)
3742 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3744 #ifdef POINTERS_EXTEND_UNSIGNED
3745 if (GET_MODE (offset_rtx) != Pmode)
3746 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3749 /* A constant address in TO_RTX can have VOIDmode, we must not try
3750 to call force_reg for that case. Avoid that case. */
3751 if (GET_CODE (to_rtx) == MEM
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && alignment == GET_MODE_ALIGNMENT (mode1))
3760 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3762 if (GET_CODE (XEXP (temp, 0)) == REG)
3765 to_rtx = (replace_equiv_address
3766 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3771 to_rtx = offset_address (to_rtx, offset_rtx,
3772 highest_pow2_factor (offset));
3777 if (GET_CODE (to_rtx) == MEM)
3779 /* When the offset is zero, to_rtx is the address of the
3780 structure we are storing into, and hence may be shared.
3781 We must make a new MEM before setting the volatile bit. */
3783 to_rtx = copy_rtx (to_rtx);
3785 MEM_VOLATILE_P (to_rtx) = 1;
3787 #if 0 /* This was turned off because, when a field is volatile
3788 in an object which is not volatile, the object may be in a register,
3789 and then we would abort over here. */
3795 if (TREE_CODE (to) == COMPONENT_REF
3796 && TREE_READONLY (TREE_OPERAND (to, 1)))
3799 to_rtx = copy_rtx (to_rtx);
3801 RTX_UNCHANGING_P (to_rtx) = 1;
3804 /* Check the access. */
3805 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3810 enum machine_mode best_mode;
3812 best_mode = get_best_mode (bitsize, bitpos,
3813 TYPE_ALIGN (TREE_TYPE (tem)),
3815 if (best_mode == VOIDmode)
3818 best_mode_size = GET_MODE_BITSIZE (best_mode);
3819 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3820 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3821 size *= GET_MODE_SIZE (best_mode);
3823 /* Check the access right of the pointer. */
3824 in_check_memory_usage = 1;
3826 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3827 VOIDmode, 3, to_addr, Pmode,
3828 GEN_INT (size), TYPE_MODE (sizetype),
3829 GEN_INT (MEMORY_USE_WO),
3830 TYPE_MODE (integer_type_node));
3831 in_check_memory_usage = 0;
3834 /* If this is a varying-length object, we must get the address of
3835 the source and do an explicit block move. */
3838 unsigned int from_align;
3839 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3841 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3843 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3851 if (! can_address_p (to))
3853 to_rtx = copy_rtx (to_rtx);
3854 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3857 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3859 /* Spurious cast for HPUX compiler. */
3860 ? ((enum machine_mode)
3861 TYPE_MODE (TREE_TYPE (to)))
3865 int_size_in_bytes (TREE_TYPE (tem)),
3866 get_alias_set (to));
3868 preserve_temp_slots (result);
3872 /* If the value is meaningful, convert RESULT to the proper mode.
3873 Otherwise, return nothing. */
3874 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3875 TYPE_MODE (TREE_TYPE (from)),
3877 TREE_UNSIGNED (TREE_TYPE (to)))
3882 /* If the rhs is a function call and its value is not an aggregate,
3883 call the function before we start to compute the lhs.
3884 This is needed for correct code for cases such as
3885 val = setjmp (buf) on machines where reference to val
3886 requires loading up part of an address in a separate insn.
3888 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3889 since it might be a promoted variable where the zero- or sign- extension
3890 needs to be done. Handling this in the normal way is safe because no
3891 computation is done before the call. */
3892 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3894 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3895 && GET_CODE (DECL_RTL (to)) == REG))
3900 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3904 /* Handle calls that return values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3908 TYPE_ALIGN (TREE_TYPE (from)));
3909 else if (GET_MODE (to_rtx) == BLKmode)
3910 emit_block_move (to_rtx, value, expr_size (from));
3913 #ifdef POINTERS_EXTEND_UNSIGNED
3914 if (POINTER_TYPE_P (TREE_TYPE (to))
3915 && GET_MODE (to_rtx) != GET_MODE (value))
3916 value = convert_memory_address (GET_MODE (to_rtx), value);
3918 emit_move_insn (to_rtx, value);
3920 preserve_temp_slots (to_rtx);
3923 return want_value ? to_rtx : NULL_RTX;
3926 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3927 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3930 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3932 /* Don't move directly into a return register. */
3933 if (TREE_CODE (to) == RESULT_DECL
3934 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3939 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3941 if (GET_CODE (to_rtx) == PARALLEL)
3942 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3943 TYPE_ALIGN (TREE_TYPE (from)));
3945 emit_move_insn (to_rtx, temp);
3947 preserve_temp_slots (to_rtx);
3950 return want_value ? to_rtx : NULL_RTX;
3953 /* In case we are returning the contents of an object which overlaps
3954 the place the value is being stored, use a safe function when copying
3955 a value through a pointer into a structure value return block. */
3956 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3957 && current_function_returns_struct
3958 && !current_function_returns_pcc_struct)
3963 size = expr_size (from);
3964 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3965 EXPAND_MEMORY_USE_DONT);
3967 /* Copy the rights of the bitmap. */
3968 if (current_function_check_memory_usage)
3969 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3970 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3971 XEXP (from_rtx, 0), Pmode,
3972 convert_to_mode (TYPE_MODE (sizetype),
3973 size, TREE_UNSIGNED (sizetype)),
3974 TYPE_MODE (sizetype));
3976 #ifdef TARGET_MEM_FUNCTIONS
3977 emit_library_call (memmove_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3979 XEXP (from_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (sizetype),
3981 size, TREE_UNSIGNED (sizetype)),
3982 TYPE_MODE (sizetype));
3984 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3985 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3986 XEXP (to_rtx, 0), Pmode,
3987 convert_to_mode (TYPE_MODE (integer_type_node),
3988 size, TREE_UNSIGNED (integer_type_node)),
3989 TYPE_MODE (integer_type_node));
3992 preserve_temp_slots (to_rtx);
3995 return want_value ? to_rtx : NULL_RTX;
3998 /* Compute FROM and store the value in the rtx we got. */
4001 result = store_expr (from, to_rtx, want_value);
4002 preserve_temp_slots (result);
4005 return want_value ? result : NULL_RTX;
4008 /* Generate code for computing expression EXP,
4009 and storing the value into TARGET.
4010 TARGET may contain a QUEUED rtx.
4012 If WANT_VALUE is nonzero, return a copy of the value
4013 not in TARGET, so that we can be sure to use the proper
4014 value in a containing expression even if TARGET has something
4015 else stored in it. If possible, we copy the value through a pseudo
4016 and return that pseudo. Or, if the value is constant, we try to
4017 return the constant. In some cases, we return a pseudo
4018 copied *from* TARGET.
4020 If the mode is BLKmode then we may return TARGET itself.
4021 It turns out that in BLKmode it doesn't cause a problem,
4022 because C has no operators that could combine two different
4023 assignments into the same BLKmode object with different values
4024 with no sequence point. Will other languages need this to be fixed?
4027 If WANT_VALUE is 0, we return NULL, to make sure
4028 to catch quickly any cases where the caller uses the value
4029 and fails to set WANT_VALUE. */
4032 store_expr (exp, target, want_value)
4038 int dont_return_target = 0;
4039 int dont_store_target = 0;
4041 if (TREE_CODE (exp) == COMPOUND_EXPR)
4043 /* Perform first part of compound expression, then assign from second
4045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4047 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
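/* For example, for the C assignment "a = (f (), b)" the call f () is
   expanded above purely for its side effects and then b is stored
   into a by the recursive call.  */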
4049 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4051 /* For conditional expression, get safe form of the target. Then
4052 test the condition, doing the appropriate assignment on either
4053 side. This avoids the creation of unnecessary temporaries.
4054 For non-BLKmode, it is more efficient not to do this. */
4056 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059 target = protect_from_queue (target, 1);
4061 do_pending_stack_adjust ();
4063 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 1), target, 0);
4066 end_cleanup_deferral ();
4068 emit_jump_insn (gen_jump (lab2));
4071 start_cleanup_deferral ();
4072 store_expr (TREE_OPERAND (exp, 2), target, 0);
4073 end_cleanup_deferral ();
4078 return want_value ? target : NULL_RTX;
4080 else if (queued_subexp_p (target))
4081 /* If target contains a postincrement, let's not risk
4082 using it as the place to generate the rhs. */
4084 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4086 /* Expand EXP into a new pseudo. */
4087 temp = gen_reg_rtx (GET_MODE (target));
4088 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4091 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4093 /* If target is volatile, ANSI requires accessing the value
4094 *from* the target, if it is accessed. So make that happen.
4095 In no case return the target itself. */
4096 if (! MEM_VOLATILE_P (target) && want_value)
4097 dont_return_target = 1;
4099 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4100 && GET_MODE (target) != BLKmode)
4101 /* If target is in memory and caller wants value in a register instead,
4102 arrange that. Pass TARGET as target for expand_expr so that,
4103 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4104 We know expand_expr will not use the target in that case.
4105 Don't do this if TARGET is volatile because we are supposed
4106 to write it and then read it. */
4108 temp = expand_expr (exp, target, GET_MODE (target), 0);
4109 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4111 /* If TEMP is already in the desired TARGET, only copy it from
4112 memory and don't store it there again. */
4114 || (rtx_equal_p (temp, target)
4115 && ! side_effects_p (temp) && ! side_effects_p (target)))
4116 dont_store_target = 1;
4117 temp = copy_to_reg (temp);
4119 dont_return_target = 1;
4121 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4122 /* If this is a scalar in a register that is stored in a wider mode
4123 than the declared mode, compute the result into its declared mode
4124 and then convert to the wider mode. Our value is the computed
4127 /* If we don't want a value, we can do the conversion inside EXP,
4128 which will often result in some optimizations. Do the conversion
4129 in two steps: first change the signedness, if needed, then
4130 the extend. But don't do this if the type of EXP is a subtype
4131 of something else since then the conversion might involve
4132 more than just converting modes. */
4133 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4134 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4136 if (TREE_UNSIGNED (TREE_TYPE (exp))
4137 != SUBREG_PROMOTED_UNSIGNED_P (target))
4140 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4144 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4145 SUBREG_PROMOTED_UNSIGNED_P (target)),
4149 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4151 /* If TEMP is a volatile MEM and we want a result value, make
4152 the access now so it gets done only once. Likewise if
4153 it contains TARGET. */
4154 if (GET_CODE (temp) == MEM && want_value
4155 && (MEM_VOLATILE_P (temp)
4156 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4157 temp = copy_to_reg (temp);
4159 /* If TEMP is a VOIDmode constant, use convert_modes to make
4160 sure that we properly convert it. */
4161 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4163 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4164 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4165 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4166 GET_MODE (target), temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 convert_move (SUBREG_REG (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4173 /* If we promoted a constant, change the mode back down to match
4174 target. Otherwise, the caller might get confused by a result whose
4175 mode is larger than expected. */
4177 if (want_value && GET_MODE (temp) != GET_MODE (target)
4178 && GET_MODE (temp) != VOIDmode)
4180 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4181 SUBREG_PROMOTED_VAR_P (temp) = 1;
4182 SUBREG_PROMOTED_UNSIGNED_P (temp)
4183 = SUBREG_PROMOTED_UNSIGNED_P (target);
4186 return want_value ? temp : NULL_RTX;
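/* Illustration of the promoted-variable case above: a `short' variable
   kept in an SImode register is represented as a SUBREG with
   SUBREG_PROMOTED_VAR_P set; the rhs is computed in HImode and
   convert_move then sign- or zero-extends it into the full SImode
   register, as SUBREG_PROMOTED_UNSIGNED_P directs.  */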
4190 temp = expand_expr (exp, target, GET_MODE (target), 0);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || want_value))
4203 dont_return_target = 1;
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4216 if (current_function_check_memory_usage
4217 && GET_CODE (target) == MEM
4218 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4220 in_check_memory_usage = 1;
4221 if (GET_CODE (temp) == MEM)
4222 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4223 VOIDmode, 3, XEXP (target, 0), Pmode,
4224 XEXP (temp, 0), Pmode,
4225 expr_size (exp), TYPE_MODE (sizetype));
4227 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4228 VOIDmode, 3, XEXP (target, 0), Pmode,
4229 expr_size (exp), TYPE_MODE (sizetype),
4230 GEN_INT (MEMORY_USE_WO),
4231 TYPE_MODE (integer_type_node));
4232 in_check_memory_usage = 0;
4235 /* If value was not generated in the target, store it there.
4236 Convert the value to TARGET's type first if necessary. */
4237 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4238 one or both of them are volatile memory refs, we have to distinguish
4240 - expand_expr has used TARGET. In this case, we must not generate
4241 another copy. This can be detected by TARGET being equal according
4243 - expand_expr has not used TARGET - that means that the source just
4244 happens to have the same RTX form. Since temp will have been created
4245 by expand_expr, it will compare unequal according to ==.
4246 We must generate a copy in this case, to reach the correct number
4247 of volatile memory references. */
4249 if ((! rtx_equal_p (temp, target)
4250 || (temp != target && (side_effects_p (temp)
4251 || side_effects_p (target))))
4252 && TREE_CODE (exp) != ERROR_MARK
4253 && ! dont_store_target)
4255 target = protect_from_queue (target, 1);
4256 if (GET_MODE (temp) != GET_MODE (target)
4257 && GET_MODE (temp) != VOIDmode)
4259 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4260 if (dont_return_target)
4262 /* In this case, we will return TEMP,
4263 so make sure it has the proper mode.
4264 But don't forget to store the value into TARGET. */
4265 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4266 emit_move_insn (target, temp);
4269 convert_move (target, temp, unsignedp);
4272 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4274 /* Handle copying a string constant into an array.
4275 The string constant may be shorter than the array.
4276 So copy just the string's actual length, and clear the rest. */
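/* For instance (illustrative only): for
       char buf[8] = "abc";
   the four bytes of "abc\0" are block-copied and the remaining four
   bytes of BUF are cleared by the code below.  */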
4280 /* Get the size of the data type of the string,
4281 which is actually the size of the target. */
4282 size = expr_size (exp);
4283 if (GET_CODE (size) == CONST_INT
4284 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4285 emit_block_move (target, temp, size);
4288 /* Compute the size of the data to copy from the string. */
4289 tree copy_size
4290 = size_binop (MIN_EXPR,
4291 make_tree (sizetype, size),
4292 size_int (TREE_STRING_LENGTH (exp)));
4293 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4294 VOIDmode, 0);
4295 rtx label = 0;
4297 /* Copy that much. */
4298 emit_block_move (target, temp, copy_size_rtx);
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4303 addr = XEXP (target, 0);
4304 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4306 if (GET_CODE (copy_size_rtx) == CONST_INT)
4307 {
4308 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4309 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4310 }
4311 else
4312 {
4313 addr = force_reg (ptr_mode, addr);
4314 addr = expand_binop (ptr_mode, add_optab, addr,
4315 copy_size_rtx, NULL_RTX, 0,
4316 OPTAB_LIB_WIDEN);
4318 size = expand_binop (ptr_mode, sub_optab, size,
4319 copy_size_rtx, NULL_RTX, 0,
4320 OPTAB_LIB_WIDEN);
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, 0, label);
4327 if (size != const0_rtx)
4329 rtx dest = gen_rtx_MEM (BLKmode, addr);
4331 MEM_COPY_ATTRIBUTES (dest, target);
4333 /* Be sure we can write on ADDR. */
4334 in_check_memory_usage = 1;
4335 if (current_function_check_memory_usage)
4336 emit_library_call (chkr_check_addr_libfunc,
4337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4338 addr, Pmode,
4339 size, TYPE_MODE (sizetype),
4340 GEN_INT (MEMORY_USE_WO),
4341 TYPE_MODE (integer_type_node));
4342 in_check_memory_usage = 0;
4343 clear_storage (dest, size);
4350 /* Handle calls that return values in multiple non-contiguous locations.
4351 The Irix 6 ABI has examples of this. */
4352 else if (GET_CODE (target) == PARALLEL)
4353 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4355 else if (GET_MODE (temp) == BLKmode)
4356 emit_block_move (target, temp, expr_size (exp));
4358 emit_move_insn (target, temp);
4361 /* If we don't want a value, return NULL_RTX. */
4365 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4366 ??? The latter test doesn't seem to make sense. */
4367 else if (dont_return_target && GET_CODE (temp) != MEM)
4370 /* Return TARGET itself if it is a hard register. */
4371 else if (want_value && GET_MODE (target) != BLKmode
4372 && ! (GET_CODE (target) == REG
4373 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4374 return copy_to_reg (target);
4380 /* Return 1 if EXP just contains zeros. */
4388 switch (TREE_CODE (exp))
4389 {
4390 case CONVERT_EXPR:
4391 case NOP_EXPR:
4392 case NON_LVALUE_EXPR:
4393 return is_zeros_p (TREE_OPERAND (exp, 0));
4395 case INTEGER_CST:
4396 return integer_zerop (exp);
4398 case COMPLEX_CST:
4399 return
4400 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4402 case REAL_CST:
4403 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4405 case CONSTRUCTOR:
4406 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4407 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4408 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4409 if (! is_zeros_p (TREE_VALUE (elt)))
4419 /* Return 1 if EXP contains mostly (3/4) zeros. */
4421 static int
4422 mostly_zeros_p (exp)
4423 tree exp;
4424 {
4425 if (TREE_CODE (exp) == CONSTRUCTOR)
4427 int elts = 0, zeros = 0;
4428 tree elt = CONSTRUCTOR_ELTS (exp);
4429 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4431 /* If there are no ranges of true bits, it is all zero. */
4432 return elt == NULL_TREE;
4434 for (; elt; elt = TREE_CHAIN (elt))
4436 /* We do not handle the case where the index is a RANGE_EXPR,
4437 so the statistic will be somewhat inaccurate.
4438 We do make a more accurate count in store_constructor itself,
4439 and since this function is only used for nested array elements,
4440 this should be close enough. */
4441 if (mostly_zeros_p (TREE_VALUE (elt)))
4446 return 4 * zeros >= 3 * elts;
4449 return is_zeros_p (exp);
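/* A worked instance of the 3/4 test above (illustrative): a constructor
   with 8 elements of which 6 are zero gives 4 * 6 >= 3 * 8, i.e.
   24 >= 24, so it counts as "mostly zeros" and callers will prefer to
   clear the whole object and store only the nonzero elements.  */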
4452 /* Helper function for store_constructor.
4453 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4454 TYPE is the type of the CONSTRUCTOR, not the element type.
4455 ALIGN and CLEARED are as for store_constructor.
4456 ALIAS_SET is the alias set to use for any stores.
4458 This provides a recursive shortcut back to store_constructor when it isn't
4459 necessary to go through store_field. This is so that we can pass through
4460 the cleared field to let store_constructor know that we may not have to
4461 clear a substructure if the outer structure has already been cleared. */
4464 store_constructor_field (target, bitsize, bitpos,
4465 mode, exp, type, align, cleared, alias_set)
4467 unsigned HOST_WIDE_INT bitsize;
4468 HOST_WIDE_INT bitpos;
4469 enum machine_mode mode;
4475 if (TREE_CODE (exp) == CONSTRUCTOR
4476 && bitpos % BITS_PER_UNIT == 0
4477 /* If we have a non-zero bitpos for a register target, then we just
4478 let store_field do the bitfield handling. This is unlikely to
4479 generate unnecessary clear instructions anyways. */
4480 && (bitpos == 0 || GET_CODE (target) == MEM))
4482 if (GET_CODE (target) == MEM)
4484 = adjust_address (target,
4485 GET_MODE (target) == BLKmode
4487 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4488 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4491 /* Show the alignment may no longer be what it was and update the alias
4492 set, if required. */
4494 align = MIN (align, (unsigned int) bitpos & - bitpos);
4496 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4497 && MEM_ALIAS_SET (target) != 0)
4499 target = copy_rtx (target);
4500 set_mem_alias_set (target, alias_set);
4503 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4506 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4507 int_size_in_bytes (type), alias_set);
4510 /* Store the value of constructor EXP into the rtx TARGET.
4511 TARGET is either a REG or a MEM.
4512 ALIGN is the maximum known alignment for TARGET.
4513 CLEARED is true if TARGET is known to have been zero'd.
4514 SIZE is the number of bytes of TARGET we are allowed to modify: this
4515 may not be the same as the size of EXP if we are assigning to a field
4516 which has been packed to exclude padding bits. */
4519 store_constructor (exp, target, align, cleared, size)
4526 tree type = TREE_TYPE (exp);
4527 #ifdef WORD_REGISTER_OPERATIONS
4528 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4531 /* We know our target cannot conflict, since safe_from_p has been called. */
4533 /* Don't try copying piece by piece into a hard register
4534 since that is vulnerable to being clobbered by EXP.
4535 Instead, construct in a pseudo register and then copy it all. */
4536 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4538 rtx temp = gen_reg_rtx (GET_MODE (target));
4539 store_constructor (exp, temp, align, cleared, size);
4540 emit_move_insn (target, temp);
4545 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4546 || TREE_CODE (type) == QUAL_UNION_TYPE)
4550 /* Inform later passes that the whole union value is dead. */
4551 if ((TREE_CODE (type) == UNION_TYPE
4552 || TREE_CODE (type) == QUAL_UNION_TYPE)
4555 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4557 /* If the constructor is empty, clear the union. */
4558 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4559 clear_storage (target, expr_size (exp));
4562 /* If we are building a static constructor into a register,
4563 set the initial value as zero so we can fold the value into
4564 a constant. But if more than one register is involved,
4565 this probably loses. */
4566 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4567 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4570 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4575 /* If the constructor has fewer fields than the structure
4576 or if we are initializing the structure to mostly zeros,
4577 clear the whole structure first. Don't do this if TARGET is a
4578 register whose mode size isn't equal to SIZE since clear_storage
4579 can't handle this case. */
4580 else if (size > 0
4581 && ((list_length (CONSTRUCTOR_ELTS (exp))
4582 != fields_length (type))
4583 || mostly_zeros_p (exp))
4584 && (GET_CODE (target) != REG
4585 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4586 {
4587 if (! cleared)
4588 clear_storage (target, GEN_INT (size));
4590 cleared = 1;
4591 }
4592 else
4593 /* Inform later passes that the old value is dead. */
4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
4599 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4601 tree field = TREE_PURPOSE (elt);
4602 #ifdef WORD_REGISTER_OPERATIONS
4603 tree value = TREE_VALUE (elt);
4605 enum machine_mode mode;
4606 HOST_WIDE_INT bitsize;
4607 HOST_WIDE_INT bitpos = 0;
4610 rtx to_rtx = target;
4612 /* Just ignore missing fields.
4613 We cleared the whole structure, above,
4614 if any fields are missing. */
4615 if (field == 0)
4616 continue;
4618 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4619 continue;
4621 if (host_integerp (DECL_SIZE (field), 1))
4622 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4623 else
4624 bitsize = -1;
4626 unsignedp = TREE_UNSIGNED (field);
4627 mode = DECL_MODE (field);
4628 if (DECL_BIT_FIELD (field))
4631 offset = DECL_FIELD_OFFSET (field);
4632 if (host_integerp (offset, 0)
4633 && host_integerp (bit_position (field), 0))
4634 {
4635 bitpos = int_bit_position (field);
4636 offset = 0;
4637 }
4638 else
4639 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4645 if (contains_placeholder_p (offset))
4646 offset = build (WITH_RECORD_EXPR, sizetype,
4647 offset, make_tree (TREE_TYPE (exp), target));
4649 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4650 if (GET_CODE (to_rtx) != MEM)
4653 if (GET_MODE (offset_rtx) != ptr_mode)
4654 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (offset_rtx) != Pmode)
4658 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4661 to_rtx = offset_address (to_rtx, offset_rtx,
4662 highest_pow2_factor (offset));
4664 align = DECL_OFFSET_ALIGN (field);
4667 if (TREE_READONLY (field))
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4685 && exp_size >= 0
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4688 tree type = TREE_TYPE (value);
4689 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4691 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4692 value = convert (type, value);
4694 if (BYTES_BIG_ENDIAN)
4695 value
4696 = fold (build (LSHIFT_EXPR, type, value,
4697 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4698 bitsize = BITS_PER_WORD;
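/* E.g. (illustrative): storing the constant 0x12 into an 8-bit field at
   bit 0 of a 32-bit word register becomes a full-word store, of 0x12 on
   a little-endian target, or of 0x12 << 24 after the shift above on a
   big-endian one, which the optimizers can track far more easily than a
   bit-field insertion.  */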
4703 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4704 && DECL_NONADDRESSABLE_P (field))
4706 to_rtx = copy_rtx (to_rtx);
4707 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4710 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4711 TREE_VALUE (elt), type, align, cleared,
4712 get_alias_set (TREE_TYPE (field)));
4715 else if (TREE_CODE (type) == ARRAY_TYPE)
4720 tree domain = TYPE_DOMAIN (type);
4721 tree elttype = TREE_TYPE (type);
4722 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4723 && TYPE_MAX_VALUE (domain)
4724 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4725 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4726 HOST_WIDE_INT minelt = 0;
4727 HOST_WIDE_INT maxelt = 0;
4729 /* If we have constant bounds for the range of the type, get them. */
4732 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4733 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4736 /* If the constructor has fewer elements than the array,
4737 clear the whole array first. Similarly if this is
4738 a static constructor of a non-BLKmode object. */
4739 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4743 HOST_WIDE_INT count = 0, zero_count = 0;
4744 need_to_clear = ! const_bounds_p;
4746 /* This loop is a more accurate version of the loop in
4747 mostly_zeros_p (it handles RANGE_EXPR in an index).
4748 It is also needed to check for missing elements. */
4749 for (elt = CONSTRUCTOR_ELTS (exp);
4750 elt != NULL_TREE && ! need_to_clear;
4751 elt = TREE_CHAIN (elt))
4753 tree index = TREE_PURPOSE (elt);
4754 HOST_WIDE_INT this_node_count;
4756 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4758 tree lo_index = TREE_OPERAND (index, 0);
4759 tree hi_index = TREE_OPERAND (index, 1);
4761 if (! host_integerp (lo_index, 1)
4762 || ! host_integerp (hi_index, 1))
4763 {
4764 need_to_clear = 1;
4765 break;
4766 }
4768 this_node_count = (tree_low_cst (hi_index, 1)
4769 - tree_low_cst (lo_index, 1) + 1);
4771 else
4772 this_node_count = 1;
4774 count += this_node_count;
4775 if (mostly_zeros_p (TREE_VALUE (elt)))
4776 zero_count += this_node_count;
4779 /* Clear the entire array first if there are any missing elements,
4780 or if the incidence of zero elements is >= 75%. */
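/* E.g. (illustrative): an array with domain [0, 9] whose constructor
   supplies only 7 elements trips the first test below (7 < 10), and one
   supplying 8 elements of which 6 are zero trips the second
   (4 * 6 >= 3 * 8), so in either case the array is cleared first.  */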
4781 if (! need_to_clear
4782 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4783 need_to_clear = 1;
4786 if (need_to_clear && size > 0)
4787 {
4788 if (! cleared)
4789 clear_storage (target, GEN_INT (size));
4790 cleared = 1;
4791 }
4792 else if (REG_P (target))
4793 /* Inform later passes that the old value is dead. */
4794 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4796 /* Store each element of the constructor into
4797 the corresponding element of TARGET, determined
4798 by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt = TREE_CHAIN (elt), i++)
4803 enum machine_mode mode;
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4807 tree value = TREE_VALUE (elt);
4808 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4809 tree index = TREE_PURPOSE (elt);
4810 rtx xtarget = target;
4812 if (cleared && is_zeros_p (value))
4813 continue;
4815 unsignedp = TREE_UNSIGNED (elttype);
4816 mode = TYPE_MODE (elttype);
4817 if (mode == BLKmode)
4818 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4822 bitsize = GET_MODE_BITSIZE (mode);
4824 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4826 tree lo_index = TREE_OPERAND (index, 0);
4827 tree hi_index = TREE_OPERAND (index, 1);
4828 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4829 struct nesting *loop;
4830 HOST_WIDE_INT lo, hi, count;
4833 /* If the range is constant and "small", unroll the loop. */
4835 && host_integerp (lo_index, 0)
4836 && host_integerp (hi_index, 0)
4837 && (lo = tree_low_cst (lo_index, 0),
4838 hi = tree_low_cst (hi_index, 0),
4839 count = hi - lo + 1,
4840 (GET_CODE (target) != MEM
4842 || (host_integerp (TYPE_SIZE (elttype), 1)
4843 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4846 lo -= minelt; hi -= minelt;
4847 for (; lo <= hi; lo++)
4849 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4851 if (GET_CODE (target) == MEM
4852 && !MEM_KEEP_ALIAS_SET_P (target)
4853 && TYPE_NONALIASED_COMPONENT (type))
4855 target = copy_rtx (target);
4856 MEM_KEEP_ALIAS_SET_P (target) = 1;
4859 store_constructor_field
4860 (target, bitsize, bitpos, mode, value, type, align,
4861 cleared, get_alias_set (elttype));
4866 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4867 loop_top = gen_label_rtx ();
4868 loop_end = gen_label_rtx ();
4870 unsignedp = TREE_UNSIGNED (domain);
4872 index = build_decl (VAR_DECL, NULL_TREE, domain);
4875 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4877 SET_DECL_RTL (index, index_r);
4878 if (TREE_CODE (value) == SAVE_EXPR
4879 && SAVE_EXPR_RTL (value) == 0)
4881 /* Make sure value gets expanded once before the
4882 loop. */
4883 expand_expr (value, const0_rtx, VOIDmode, 0);
4886 store_expr (lo_index, index_r, 0);
4887 loop = expand_start_loop (0);
4889 /* Assign value to element index. */
4890 position
4891 = convert (ssizetype,
4892 fold (build (MINUS_EXPR, TREE_TYPE (index),
4893 index, TYPE_MIN_VALUE (domain))));
4894 position = size_binop (MULT_EXPR, position,
4895 convert (ssizetype,
4896 TYPE_SIZE_UNIT (elttype)));
4898 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4899 xtarget = offset_address (target, pos_rtx,
4900 highest_pow2_factor (position));
4901 xtarget = adjust_address (xtarget, mode, 0);
4902 if (TREE_CODE (value) == CONSTRUCTOR)
4903 store_constructor (value, xtarget, align, cleared,
4904 bitsize / BITS_PER_UNIT);
4906 store_expr (value, xtarget, 0);
4908 expand_exit_loop_if_false (loop,
4909 build (LT_EXPR, integer_type_node,
4912 expand_increment (build (PREINCREMENT_EXPR,
4914 index, integer_one_node), 0, 0);
4916 emit_label (loop_end);
4919 else if ((index != 0 && ! host_integerp (index, 0))
4920 || ! host_integerp (TYPE_SIZE (elttype), 1))
4924 if (index == 0)
4925 index = ssize_int (i);
4927 if (minelt)
4928 index = convert (ssizetype,
4929 fold (build (MINUS_EXPR, index,
4930 TYPE_MIN_VALUE (domain))));
4932 position = size_binop (MULT_EXPR, index,
4933 convert (ssizetype,
4934 TYPE_SIZE_UNIT (elttype)));
4935 xtarget = offset_address (target,
4936 expand_expr (position, 0, VOIDmode, 0),
4937 highest_pow2_factor (position));
4938 xtarget = adjust_address (xtarget, mode, 0);
4939 store_expr (value, xtarget, 0);
4944 bitpos = ((tree_low_cst (index, 0) - minelt)
4945 * tree_low_cst (TYPE_SIZE (elttype), 1));
4946 else
4947 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4949 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4950 && TYPE_NONALIASED_COMPONENT (type))
4952 target = copy_rtx (target);
4953 MEM_KEEP_ALIAS_SET_P (target) = 1;
4956 store_constructor_field (target, bitsize, bitpos, mode, value,
4957 type, align, cleared,
4958 get_alias_set (elttype));
4964 /* Set constructor assignments. */
4965 else if (TREE_CODE (type) == SET_TYPE)
4967 tree elt = CONSTRUCTOR_ELTS (exp);
4968 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4969 tree domain = TYPE_DOMAIN (type);
4970 tree domain_min, domain_max, bitlength;
4972 /* The default implementation strategy is to extract the constant
4973 parts of the constructor, use that to initialize the target,
4974 and then "or" in whatever non-constant ranges we need in addition.
4976 If a large set is all zero or all ones, it is
4977 probably better to set it using memset (if available) or bzero.
4978 Also, if a large set has just a single range, it may also be
4979 better to first clear the whole set (using
4980 bzero/memset), and then set the bits we want. */
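/* E.g. (illustrative): a 32-bit set whose constant part is the members
   {1, 4, 5, 6} can be initialized with the single word
   (1 << 1) | (1 << 4) | (1 << 5) | (1 << 6) == 0x72, after which only
   the non-constant ranges need to be OR'ed in at run time.  */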
4982 /* Check for all zeros. */
4983 if (elt == NULL_TREE && size > 0)
4986 clear_storage (target, GEN_INT (size));
4990 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4991 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4992 bitlength = size_binop (PLUS_EXPR,
4993 size_diffop (domain_max, domain_min),
4994 ssize_int (1));
4996 nbits = tree_low_cst (bitlength, 1);
4998 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4999 are "complicated" (more than one range), initialize (the
5000 constant parts) by copying from a constant. */
5001 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5002 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5004 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5005 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5006 char *bit_buffer = (char *) alloca (nbits);
5007 HOST_WIDE_INT word = 0;
5008 unsigned int bit_pos = 0;
5009 unsigned int ibit = 0;
5010 unsigned int offset = 0; /* In bytes from beginning of set. */
5012 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5013 for (;;)
5014 {
5015 if (bit_buffer[ibit])
5017 if (BYTES_BIG_ENDIAN)
5018 word |= (1 << (set_word_size - 1 - bit_pos));
5019 else
5020 word |= 1 << bit_pos;
5024 if (bit_pos >= set_word_size || ibit == nbits)
5026 if (word != 0 || ! cleared)
5028 rtx datum = GEN_INT (word);
5031 /* The assumption here is that it is safe to use
5032 XEXP if the set is multi-word, but not if
5033 it's single-word. */
5034 if (GET_CODE (target) == MEM)
5035 to_rtx = adjust_address (target, mode, offset);
5036 else if (offset == 0)
5037 to_rtx = target;
5038 else
5039 abort ();
5040 emit_move_insn (to_rtx, datum);
5047 offset += set_word_size / BITS_PER_UNIT;
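/* To illustrate the packing above (illustrative only): with a 32-bit
   SET_WORD_SIZE and bits 1 and 4 of the buffer set, WORD accumulates
   (1 << 1) | (1 << 4) == 0x12 on a little-endian target, or the
   mirrored (1 << 30) | (1 << 27) == 0x48000000 when BYTES_BIG_ENDIAN.  */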
5052 /* Don't bother clearing storage if the set is all ones. */
5053 if (TREE_CHAIN (elt) != NULL_TREE
5054 || (TREE_PURPOSE (elt) == NULL_TREE
5055 ? nbits != 1
5056 : (! host_integerp (TREE_VALUE (elt), 0)
5057 || ! host_integerp (TREE_PURPOSE (elt), 0)
5058 || (tree_low_cst (TREE_VALUE (elt), 0)
5059 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5060 != (HOST_WIDE_INT) nbits))))
5061 clear_storage (target, expr_size (exp));
5063 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5065 /* Start of range of element or NULL. */
5066 tree startbit = TREE_PURPOSE (elt);
5067 /* End of range of element, or element value. */
5068 tree endbit = TREE_VALUE (elt);
5069 #ifdef TARGET_MEM_FUNCTIONS
5070 HOST_WIDE_INT startb, endb;
5072 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5074 bitlength_rtx = expand_expr (bitlength,
5075 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5077 /* Handle non-range tuple element like [ expr ]. */
5078 if (startbit == NULL_TREE)
5079 {
5080 startbit = save_expr (endbit);
5081 endbit = startbit;
5082 }
5084 startbit = convert (sizetype, startbit);
5085 endbit = convert (sizetype, endbit);
5086 if (! integer_zerop (domain_min))
5088 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5089 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5091 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5092 EXPAND_CONST_ADDRESS);
5093 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5094 EXPAND_CONST_ADDRESS);
5096 if (REG_P (target))
5097 {
5098 targetx
5099 = assign_temp
5100 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5101 TYPE_QUAL_CONST)),
5102 0, 1, 1);
5103 emit_move_insn (targetx, target);
5106 else if (GET_CODE (target) == MEM)
5111 #ifdef TARGET_MEM_FUNCTIONS
5112 /* Optimization: If startbit and endbit are
5113 constants divisible by BITS_PER_UNIT,
5114 call memset instead. */
5115 if (TREE_CODE (startbit) == INTEGER_CST
5116 && TREE_CODE (endbit) == INTEGER_CST
5117 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5118 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5120 emit_library_call (memset_libfunc, LCT_NORMAL,
5122 plus_constant (XEXP (targetx, 0),
5123 startb / BITS_PER_UNIT),
5125 constm1_rtx, TYPE_MODE (integer_type_node),
5126 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5127 TYPE_MODE (sizetype));
5131 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5132 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5133 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5134 startbit_rtx, TYPE_MODE (sizetype),
5135 endbit_rtx, TYPE_MODE (sizetype));
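/* E.g. (illustrative): a constant range [8..23] has startb == 8 and
   endb == 24, both multiples of BITS_PER_UNIT, so it is handled by the
   equivalent of memset (addr + 1, -1, 2); a range such as [3..17] is
   not byte-aligned and falls through to the __setbits call.  */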
5138 emit_move_insn (target, targetx);
5146 /* Store the value of EXP (an expression tree)
5147 into a subfield of TARGET which has mode MODE and occupies
5148 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5149 If MODE is VOIDmode, it means that we are storing into a bit-field.
5151 If VALUE_MODE is VOIDmode, return nothing in particular.
5152 UNSIGNEDP is not used in this case.
5154 Otherwise, return an rtx for the value stored. This rtx
5155 has mode VALUE_MODE if that is convenient to do.
5156 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5158 ALIGN is the alignment that TARGET is known to have.
5159 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5161 ALIAS_SET is the alias set for the destination. This value will
5162 (in general) be different from that for TARGET, since TARGET is a
5163 reference to the containing structure. */
5166 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5167 unsignedp, align, total_size, alias_set)
5169 HOST_WIDE_INT bitsize;
5170 HOST_WIDE_INT bitpos;
5171 enum machine_mode mode;
5173 enum machine_mode value_mode;
5176 HOST_WIDE_INT total_size;
5179 HOST_WIDE_INT width_mask = 0;
5181 if (TREE_CODE (exp) == ERROR_MARK)
5184 /* If we have nothing to store, do nothing unless the expression has
5187 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5189 if (bitsize < HOST_BITS_PER_WIDE_INT)
5190 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5192 /* If we are storing into an unaligned field of an aligned union that is
5193 in a register, we may have the mode of TARGET being an integer mode but
5194 MODE == BLKmode. In that case, get an aligned object whose size and
5195 alignment are the same as TARGET and store TARGET into it (we can avoid
5196 the store if the field being stored is the entire width of TARGET). Then
5197 call ourselves recursively to store the field into a BLKmode version of
5198 that object. Finally, load from the object into TARGET. This is not
5199 very efficient in general, but should only be slightly more expensive
5200 than the otherwise-required unaligned accesses. Perhaps this can be
5201 cleaned up later. */
5204 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5206 rtx object
5207 = assign_temp
5208 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5209 TYPE_QUAL_CONST),
5210 0, 1, 1);
5211 rtx blk_object = copy_rtx (object);
5213 PUT_MODE (blk_object, BLKmode);
5214 set_mem_alias_set (blk_object, 0);
5216 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5217 emit_move_insn (object, target);
5219 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5220 align, total_size, alias_set);
5222 /* Even though we aren't returning target, we need to
5223 give it the updated value. */
5224 emit_move_insn (target, object);
5229 if (GET_CODE (target) == CONCAT)
5231 /* We're storing into a struct containing a single __complex. */
5235 return store_expr (exp, target, 0);
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5242 if (mode == VOIDmode
5243 || (mode != BLKmode && ! direct_store[(int) mode]
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5246 || GET_CODE (target) == REG
5247 || GET_CODE (target) == SUBREG
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
5250 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5251 && (align < GET_MODE_ALIGNMENT (mode)
5252 || bitpos % GET_MODE_ALIGNMENT (mode)))
5253 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5254 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5255 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5256 /* If the RHS and field are a constant size and the size of the
5257 RHS isn't the same size as the bitfield, we must use bitfield
5260 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5261 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5263 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5265 /* If BITSIZE is narrower than the size of the type of EXP
5266 we will be narrowing TEMP. Normally, what's wanted are the
5267 low-order bits. However, if EXP's type is a record and this is
5268 a big-endian machine, we want the upper BITSIZE bits. */
5269 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5270 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5271 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5272 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5273 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5277 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5279 if (mode != VOIDmode && mode != BLKmode
5280 && mode != TYPE_MODE (TREE_TYPE (exp)))
5281 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5283 /* If the modes of TARGET and TEMP are both BLKmode, both
5284 must be in memory and BITPOS must be aligned on a byte
5285 boundary. If so, we simply do a block copy. */
5286 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5288 unsigned int exp_align = expr_align (exp);
5290 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5291 || bitpos % BITS_PER_UNIT != 0)
5294 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5296 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5297 align = MIN (exp_align, align);
5299 /* Find an alignment that is consistent with the bit position. */
5300 while ((bitpos % align) != 0)
5303 emit_block_move (target, temp,
5304 bitsize == -1 ? expr_size (exp)
5305 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5308 return value_mode == VOIDmode ? const0_rtx : target;
5311 /* Store the value in the bitfield. */
5312 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5313 if (value_mode != VOIDmode)
5315 /* The caller wants an rtx for the value. */
5316 /* If possible, avoid refetching from the bitfield itself. */
5318 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5321 enum machine_mode tmode;
5324 return expand_and (temp,
5328 GET_MODE (temp) == VOIDmode
5330 : GET_MODE (temp))), NULL_RTX);
5331 tmode = GET_MODE (temp);
5332 if (tmode == VOIDmode)
5334 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5335 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5336 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
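/* The two shifts above sign-extend the value just stored.  E.g.
   (illustrative): for a 5-bit field in a 32-bit TMODE, COUNT is 27;
   the stored value 0x1f shifted left by 27 and then arithmetically
   right by 27 yields -1, the correct signed interpretation.  */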
5338 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5339 NULL_RTX, value_mode, 0, align,
5346 rtx addr = XEXP (target, 0);
5349 /* If a value is wanted, it must be the lhs;
5350 so make the address stable for multiple use. */
5352 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5353 && ! CONSTANT_ADDRESS_P (addr)
5354 /* A frame-pointer reference is already stable. */
5355 && ! (GET_CODE (addr) == PLUS
5356 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5357 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5358 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5359 target = replace_equiv_address (target, copy_to_reg (addr));
5361 /* Now build a reference to just the desired component. */
5363 to_rtx = copy_rtx (adjust_address (target, mode,
5364 bitpos / BITS_PER_UNIT));
5366 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5367 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5369 to_rtx = copy_rtx (to_rtx);
5370 set_mem_alias_set (to_rtx, alias_set);
5373 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5377 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5378 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5379 codes and find the ultimate containing object, which we return.
5381 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5382 bit position, and *PUNSIGNEDP to the signedness of the field.
5383 If the position of the field is variable, we store a tree
5384 giving the variable offset (in units) in *POFFSET.
5385 This offset is in addition to the bit position.
5386 If the position is not variable, we store 0 in *POFFSET.
5387 We set *PALIGNMENT to the alignment of the address that will be
5388 computed. This is the alignment of the thing we return if *POFFSET
5389 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5391 If any of the extraction expressions is volatile,
5392 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5394 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5395 is a mode that can be used to access the field. In that case, *PBITSIZE
5396 is redundant.
5398 If the field describes a variable-sized object, *PMODE is set to
5399 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5400 this case, but the address of the object can be found. */
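/* As an illustration (hypothetical layout, typical 32-bit target): for
       struct s { int pad; short f : 9; } x;
   a reference to x.f yields the containing object x with *PBITSIZE == 9,
   *PBITPOS == 32, *POFFSET == 0, and *PMODE == VOIDmode since F is a
   bit-field.  */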
5403 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5404 punsignedp, pvolatilep, palignment)
5406 HOST_WIDE_INT *pbitsize;
5407 HOST_WIDE_INT *pbitpos;
5409 enum machine_mode *pmode;
5412 unsigned int *palignment;
5415 enum machine_mode mode = VOIDmode;
5416 tree offset = size_zero_node;
5417 tree bit_offset = bitsize_zero_node;
5418 unsigned int alignment = BIGGEST_ALIGNMENT;
5419 tree placeholder_ptr = 0;
5422 /* First get the mode, signedness, and size. We do this from just the
5423 outermost expression. */
5424 if (TREE_CODE (exp) == COMPONENT_REF)
5426 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5427 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5428 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5430 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5432 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5434 size_tree = TREE_OPERAND (exp, 1);
5435 *punsignedp = TREE_UNSIGNED (exp);
5439 mode = TYPE_MODE (TREE_TYPE (exp));
5440 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5442 if (mode == BLKmode)
5443 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5445 *pbitsize = GET_MODE_BITSIZE (mode);
5450 if (! host_integerp (size_tree, 1))
5451 mode = BLKmode, *pbitsize = -1;
5452 else
5453 *pbitsize = tree_low_cst (size_tree, 1);
5456 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5457 and find the ultimate containing object. */
5460 if (TREE_CODE (exp) == BIT_FIELD_REF)
5461 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5462 else if (TREE_CODE (exp) == COMPONENT_REF)
5464 tree field = TREE_OPERAND (exp, 1);
5465 tree this_offset = DECL_FIELD_OFFSET (field);
5467 /* If this field hasn't been filled in yet, don't go
5468 past it. This should only happen when folding expressions
5469 made during type construction. */
5470 if (this_offset == 0)
5471 break;
5472 else if (! TREE_CONSTANT (this_offset)
5473 && contains_placeholder_p (this_offset))
5474 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5476 offset = size_binop (PLUS_EXPR, offset, this_offset);
5477 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5478 DECL_FIELD_BIT_OFFSET (field));
5480 if (! host_integerp (offset, 0))
5481 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5484 else if (TREE_CODE (exp) == ARRAY_REF
5485 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5487 tree index = TREE_OPERAND (exp, 1);
5488 tree array = TREE_OPERAND (exp, 0);
5489 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5490 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5491 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5493 /* We assume all arrays have sizes that are a multiple of a byte.
5494 First subtract the lower bound, if any, in the type of the
5495 index, then convert to sizetype and multiply by the size of the
5497 if (low_bound != 0 && ! integer_zerop (low_bound))
5498 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5501 /* If the index has a self-referential type, pass it to a
5502 WITH_RECORD_EXPR; if the component size is, pass our
5503 component to one. */
5504 if (! TREE_CONSTANT (index)
5505 && contains_placeholder_p (index))
5506 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5507 if (! TREE_CONSTANT (unit_size)
5508 && contains_placeholder_p (unit_size))
5509 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5511 offset = size_binop (PLUS_EXPR, offset,
5512 size_binop (MULT_EXPR,
5513 convert (sizetype, index),
5517 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5519 tree new = find_placeholder (exp, &placeholder_ptr);
5521 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5522 We might have been called from tree optimization where we
5523 haven't set up an object yet. */
5524 if (new == 0)
5525 break;
5526 else
5527 exp = new;
5529 continue;
5530 }
5531 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5532 && ! ((TREE_CODE (exp) == NOP_EXPR
5533 || TREE_CODE (exp) == CONVERT_EXPR)
5534 && (TYPE_MODE (TREE_TYPE (exp))
5535 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5538 /* If any reference in the chain is volatile, the effect is volatile. */
5539 if (TREE_THIS_VOLATILE (exp))
5542 /* If the offset is non-constant already, then we can't assume any
5543 alignment more than the alignment here. */
5544 if (! TREE_CONSTANT (offset))
5545 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5547 exp = TREE_OPERAND (exp, 0);
5551 alignment = MIN (alignment, DECL_ALIGN (exp));
5552 else if (TREE_TYPE (exp) != 0)
5553 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5555 /* If OFFSET is constant, see if we can return the whole thing as a
5556 constant bit position. Otherwise, split it up. */
5557 if (host_integerp (offset, 0)
5558 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5560 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5561 && host_integerp (tem, 0))
5562 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5563 else
5564 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
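/* E.g. (illustrative): a constant byte offset of 4 plus a bit offset of
   3 is returned as *PBITPOS == 4 * BITS_PER_UNIT + 3 == 35 (assuming
   8-bit units) with *POFFSET == 0; were the offset variable, it would
   stay in *POFFSET and only the residual 3 would appear in *PBITPOS.  */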
5567 *palignment = alignment;
5571 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5573 static enum memory_use_mode
5574 get_memory_usage_from_modifier (modifier)
5575 enum expand_modifier modifier;
5576 {
5577 switch (modifier)
5578 {
5579 case EXPAND_NORMAL:
5580 case EXPAND_SUM:
5581 return MEMORY_USE_RO;
5583 case EXPAND_MEMORY_USE_WO:
5584 return MEMORY_USE_WO;
5586 case EXPAND_MEMORY_USE_RW:
5587 return MEMORY_USE_RW;
5589 case EXPAND_MEMORY_USE_DONT:
5590 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5591 MEMORY_USE_DONT, because they are modifiers to a call of
5592 expand_expr in the ADDR_EXPR case of expand_expr. */
5593 case EXPAND_CONST_ADDRESS:
5594 case EXPAND_INITIALIZER:
5595 return MEMORY_USE_DONT;
5596 case EXPAND_MEMORY_USE_BAD:
5597 default:
5598 abort ();
5599 }
5600 }
5602 /* Given an rtx VALUE that may contain additions and multiplications, return
5603 an equivalent value that just refers to a register, memory, or constant.
5604 This is done by generating instructions to perform the arithmetic and
5605 returning a pseudo-register containing the value.
5607 The returned value may be a REG, SUBREG, MEM or constant. */
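/* A typical use (illustrative): given an address such as
       (plus (mult (reg i) (const_int 4)) (reg base))
   force_operand emits the multiply and the add and returns the pseudo
   register holding the computed sum.  */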
5610 force_operand (value, target)
5614 /* Use a temporary to force order of execution of calls to
5615 `force_operand'. */
5616 rtx tmp;
5618 /* Use subtarget as the target for operand 0 of a binary operation. */
5619 rtx subtarget = get_subtarget (target);
5621 /* Check for a PIC address load. */
5623 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5624 && XEXP (value, 0) == pic_offset_table_rtx
5625 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5626 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5627 || GET_CODE (XEXP (value, 1)) == CONST))
5630 subtarget = gen_reg_rtx (GET_MODE (value));
5631 emit_move_insn (subtarget, value);
5635 if (GET_CODE (value) == PLUS)
5636 binoptab = add_optab;
5637 else if (GET_CODE (value) == MINUS)
5638 binoptab = sub_optab;
5639 else if (GET_CODE (value) == MULT)
5641 op2 = XEXP (value, 1);
5642 if (!CONSTANT_P (op2)
5643 && !(GET_CODE (op2) == REG && op2 != subtarget))
5645 tmp = force_operand (XEXP (value, 0), subtarget);
5646 return expand_mult (GET_MODE (value), tmp,
5647 force_operand (op2, NULL_RTX),
5653 op2 = XEXP (value, 1);
5654 if (!CONSTANT_P (op2)
5655 && !(GET_CODE (op2) == REG && op2 != subtarget))
5657 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5659 binoptab = add_optab;
5660 op2 = negate_rtx (GET_MODE (value), op2);
5663 /* Check for an addition with OP2 a constant integer and our first
5664 operand a PLUS of a virtual register and something else. In that
5665 case, we want to emit the sum of the virtual register and the
5666 constant first and then add the other value. This allows virtual
5667 register instantiation to simply modify the constant rather than
5668 creating another one around this addition. */
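/* E.g. (illustrative): for
       (plus (plus (reg virtual-stack-vars) (const_int 8)) (const_int 4))
   the virtual register and 8 are summed first, so instantiation can
   later fold the displacement into a single frame-pointer offset
   instead of keeping two separate constants alive.  */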
5669 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5670 && GET_CODE (XEXP (value, 0)) == PLUS
5671 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5672 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5673 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5675 rtx temp = expand_binop (GET_MODE (value), binoptab,
5676 XEXP (XEXP (value, 0), 0), op2,
5677 subtarget, 0, OPTAB_LIB_WIDEN);
5678 return expand_binop (GET_MODE (value), binoptab, temp,
5679 force_operand (XEXP (XEXP (value, 0), 1), 0),
5680 target, 0, OPTAB_LIB_WIDEN);
5683 tmp = force_operand (XEXP (value, 0), subtarget);
5684 return expand_binop (GET_MODE (value), binoptab, tmp,
5685 force_operand (op2, NULL_RTX),
5686 target, 0, OPTAB_LIB_WIDEN);
5687 /* We give UNSIGNEDP = 0 to expand_binop
5688 because the only operations we are expanding here are signed ones. */
5693 /* Subroutine of expand_expr: return nonzero iff there is no way that
5694 EXP can reference X, which is being modified. TOP_P is nonzero if this
5695 call is going to be used to determine whether we need a temporary
5696 for EXP, as opposed to a recursive call to this function.
5698 It is always safe for this routine to return zero since it merely
5699 searches for optimization opportunities. */
5702 safe_from_p (x, exp, top_p)
5709 static tree save_expr_list;
5712 /* If EXP has varying size, we MUST use a target since we currently
5713 have no way of allocating temporaries of variable size
5714 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5715 So we assume here that something at a higher level has prevented a
5716 clash. This is somewhat bogus, but the best we can do. Only
5717 do this when X is BLKmode and when we are at the top level. */
5718 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5719 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5720 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5721 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5722 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5724 && GET_MODE (x) == BLKmode)
5725 /* If X is in the outgoing argument area, it is always safe. */
5726 || (GET_CODE (x) == MEM
5727 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5728 || (GET_CODE (XEXP (x, 0)) == PLUS
5729 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5732 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5733 find the underlying pseudo. */
5734 if (GET_CODE (x) == SUBREG)
5737 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5741 /* A SAVE_EXPR might appear many times in the expression passed to the
5742 top-level safe_from_p call, and if it has a complex subexpression,
5743 examining it multiple times could result in a combinatorial explosion.
5744 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5745 with optimization took about 28 minutes to compile -- even though it was
5746 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5747 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5748 we have processed. Note that the only test of top_p was above. */
5757 rtn = safe_from_p (x, exp, 0);
5759 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5760 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5765 /* Now look at our tree code and possibly recurse. */
5766 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5769 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5776 if (TREE_CODE (exp) == TREE_LIST)
5777 return ((TREE_VALUE (exp) == 0
5778 || safe_from_p (x, TREE_VALUE (exp), 0))
5779 && (TREE_CHAIN (exp) == 0
5780 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5781 else if (TREE_CODE (exp) == ERROR_MARK)
5782 return 1; /* An already-visited SAVE_EXPR? */
5787 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5791 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5792 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5796 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5797 the expression. If it is set, we conflict iff we are that rtx or
5798 both are in memory. Otherwise, we check all operands of the
5799 expression recursively. */
5801 switch (TREE_CODE (exp))
5804 /* If the operand is static or we are static, we can't conflict.
5805 Likewise if we don't conflict with the operand at all. */
5806 if (staticp (TREE_OPERAND (exp, 0))
5807 || TREE_STATIC (exp)
5808 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5811 /* Otherwise, the only way this can conflict is if we are taking
5812 the address of a DECL and that address is part of X, which is
5813 very rare. */
5814 exp = TREE_OPERAND (exp, 0);
5817 if (!DECL_RTL_SET_P (exp)
5818 || GET_CODE (DECL_RTL (exp)) != MEM)
5821 exp_rtl = XEXP (DECL_RTL (exp), 0);
5826 if (GET_CODE (x) == MEM
5827 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5828 get_alias_set (exp)))
5833 /* Assume that the call will clobber all hard registers and
5835 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5836 || GET_CODE (x) == MEM)
5841 /* If a sequence exists, we would have to scan every instruction
5842 in the sequence to see if it was safe. This is probably not
5843 worthwhile. */
5844 if (RTL_EXPR_SEQUENCE (exp))
5847 exp_rtl = RTL_EXPR_RTL (exp);
5850 case WITH_CLEANUP_EXPR:
5851 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5854 case CLEANUP_POINT_EXPR:
5855 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5858 exp_rtl = SAVE_EXPR_RTL (exp);
5862 /* If we've already scanned this, don't do it again. Otherwise,
5863 show we've scanned it and record for clearing the flag if we're
5864 going to do so again. */
5865 if (TREE_PRIVATE (exp))
5868 TREE_PRIVATE (exp) = 1;
5869 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5871 TREE_PRIVATE (exp) = 0;
5875 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5879 /* The only operand we look at is operand 1. The rest aren't
5880 part of the expression. */
5881 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5883 case METHOD_CALL_EXPR:
5884 /* This takes an rtx argument, but shouldn't appear here. */
5891 /* If we have an rtx, we do not need to scan our operands. */
5895 nops = first_rtl_op (TREE_CODE (exp));
5896 for (i = 0; i < nops; i++)
5897 if (TREE_OPERAND (exp, i) != 0
5898 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5901 /* If this is a language-specific tree code, it may require
5902 special handling. */
5903 if ((unsigned int) TREE_CODE (exp)
5904 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5906 && !(*lang_safe_from_p) (x, exp))
5910 /* If we have an rtl, find any enclosed object. Then see if we conflict
5911 with it. */
5914 if (GET_CODE (exp_rtl) == SUBREG)
5916 exp_rtl = SUBREG_REG (exp_rtl);
5917 if (GET_CODE (exp_rtl) == REG
5918 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5922 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5923 are memory and they conflict. */
5924 return ! (rtx_equal_p (x, exp_rtl)
5925 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5926 && true_dependence (exp_rtl, GET_MODE (x), x,
5927 rtx_addr_varies_p)));
5930 /* If we reach here, it is safe. */
5934 /* Subroutine of expand_expr: return rtx if EXP is a
5935 variable or parameter; else return 0. */
5942 switch (TREE_CODE (exp))
5946 return DECL_RTL (exp);
5952 #ifdef MAX_INTEGER_COMPUTATION_MODE
5955 check_max_integer_computation_mode (exp)
5958 enum tree_code code;
5959 enum machine_mode mode;
5961 /* Strip any NOPs that don't change the mode. */
5963 code = TREE_CODE (exp);
5965 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5966 if (code == NOP_EXPR
5967 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5970 /* First check the type of the overall operation. We need only look at
5971 unary, binary and relational operations. */
5972 if (TREE_CODE_CLASS (code) == '1'
5973 || TREE_CODE_CLASS (code) == '2'
5974 || TREE_CODE_CLASS (code) == '<')
5976 mode = TYPE_MODE (TREE_TYPE (exp));
5977 if (GET_MODE_CLASS (mode) == MODE_INT
5978 && mode > MAX_INTEGER_COMPUTATION_MODE)
5979 internal_error ("unsupported wide integer operation");
5982 /* Check operand of a unary op. */
5983 if (TREE_CODE_CLASS (code) == '1')
5985 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5986 if (GET_MODE_CLASS (mode) == MODE_INT
5987 && mode > MAX_INTEGER_COMPUTATION_MODE)
5988 internal_error ("unsupported wide integer operation");
5991 /* Check operands of a binary/comparison op. */
5992 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5994 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5995 if (GET_MODE_CLASS (mode) == MODE_INT
5996 && mode > MAX_INTEGER_COMPUTATION_MODE)
5997 internal_error ("unsupported wide integer operation");
5999 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6000 if (GET_MODE_CLASS (mode) == MODE_INT
6001 && mode > MAX_INTEGER_COMPUTATION_MODE)
6002 internal_error ("unsupported wide integer operation");
6007 /* Return the highest power of two that EXP is known to be a multiple of.
6008 This is used in updating alignment of MEMs in array references. */
6010 static HOST_WIDE_INT
6011 highest_pow2_factor (exp)
6014 HOST_WIDE_INT c0, c1;
6016 switch (TREE_CODE (exp))
6019 /* If the integer is expressible in a HOST_WIDE_INT, we can find
6020 the lowest bit that's a one. If the result is zero or negative,
6021 pessimize by returning 1. This is overly-conservative, but such
6022 things should not happen in the offset expressions that we are
6023 passed. */
6024 if (host_integerp (exp, 0))
6026 c0 = tree_low_cst (exp, 0);
6027 return c0 >= 0 ? c0 & -c0 : 1;
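/* C0 & -C0 isolates the lowest set bit of a two's complement value,
   e.g. (illustrative) 24 & -24 == 8, so a constant offset of 24 bytes
   is known to be 8-byte aligned but no more.  */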
6031 case PLUS_EXPR: case MINUS_EXPR:
6032 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6033 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6034 return MIN (c0, c1);
6037 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6038 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6041 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6043 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6044 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6045 return MAX (1, c0 / c1);
6047 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6048 case COMPOUND_EXPR: case SAVE_EXPR:
6049 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6052 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6053 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6054 return MIN (c0, c1);
6063 /* Return an object on the placeholder list that matches EXP, a
6064 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6065 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6066 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6067 is a location which initially points to a starting location in the
6068 placeholder list (zero means start of the list) and where a pointer into
6069 the placeholder list at which the object is found is placed. */
6072 find_placeholder (exp, plist)
6076 tree type = TREE_TYPE (exp);
6077 tree placeholder_expr;
6079 for (placeholder_expr
6080 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6081 placeholder_expr != 0;
6082 placeholder_expr = TREE_CHAIN (placeholder_expr))
6084 tree need_type = TYPE_MAIN_VARIANT (type);
6087 /* Find the outermost reference that is of the type we want. If none,
6088 see if any object has a type that is a pointer to the type we
6089 want. */
6090 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6091 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6092 || TREE_CODE (elt) == COND_EXPR)
6093 ? TREE_OPERAND (elt, 1)
6094 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6098 ? TREE_OPERAND (elt, 0) : 0))
6099 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6102 *plist = placeholder_expr;
6106 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6108 = ((TREE_CODE (elt) == COMPOUND_EXPR
6109 || TREE_CODE (elt) == COND_EXPR)
6110 ? TREE_OPERAND (elt, 1)
6111 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6114 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6115 ? TREE_OPERAND (elt, 0) : 0))
6116 if (POINTER_TYPE_P (TREE_TYPE (elt))
6117 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6121 *plist = placeholder_expr;
6122 return build1 (INDIRECT_REF, need_type, elt);
6129 /* expand_expr: generate code for computing expression EXP.
6130 An rtx for the computed value is returned. The value is never null.
6131 In the case of a void EXP, const0_rtx is returned.
6133 The value may be stored in TARGET if TARGET is nonzero.
6134 TARGET is just a suggestion; callers must assume that
6135 the rtx returned may not be the same as TARGET.
6137 If TARGET is CONST0_RTX, it means that the value will be ignored.
6139 If TMODE is not VOIDmode, it suggests generating the
6140 result in mode TMODE. But this is done only when convenient.
6141 Otherwise, TMODE is ignored and the value generated in its natural mode.
6142 TMODE is just a suggestion; callers must assume that
6143 the rtx returned may not have mode TMODE.
6145 Note that TARGET may have neither TMODE nor MODE. In that case, it
6146 probably will not be used.
6148 If MODIFIER is EXPAND_SUM then when EXP is an addition
6149 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6150 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6151 products as above, or REG or MEM, or constant.
6152 Ordinarily in such cases we would output mul or add instructions
6153 and then return a pseudo reg containing the sum.
6155 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6156 it also marks a label as absolutely required (it can't be dead).
6157 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6158 This is used for outputting expressions used in initializers.
6160 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6161 with a constant address even if that address is not normally legitimate.
6162 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
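/* E.g. (illustrative): expanding "a[i]" with EXPAND_SUM may return
   (plus (mult (reg i) (const_int 4)) (reg a)) without emitting any
   insns, leaving the caller free to fold the address into a memory
   reference instead of computing it into a pseudo.  */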
6165 expand_expr (exp, target, tmode, modifier)
6168 enum machine_mode tmode;
6169 enum expand_modifier modifier;
6172 tree type = TREE_TYPE (exp);
6173 int unsignedp = TREE_UNSIGNED (type);
6174 enum machine_mode mode;
6175 enum tree_code code = TREE_CODE (exp);
6177 rtx subtarget, original_target;
6180 /* Used by check-memory-usage to make modifier read only. */
6181 enum expand_modifier ro_modifier;
6183 /* Handle ERROR_MARK before anybody tries to access its type. */
6184 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6186 op0 = CONST0_RTX (tmode);
6187 if (op0 != 0)
6188 return op0;
6189 else
6190 return const0_rtx;
6191 }
6192 mode = TYPE_MODE (type);
6193 /* Use subtarget as the target for operand 0 of a binary operation. */
6194 subtarget = get_subtarget (target);
6195 original_target = target;
6196 ignore = (target == const0_rtx
6197 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6198 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6199 || code == COND_EXPR)
6200 && TREE_CODE (type) == VOID_TYPE));
6202 /* Make a read-only version of the modifier. */
6203 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6204 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6205 ro_modifier = modifier;
6207 ro_modifier = EXPAND_NORMAL;
6209 /* If we are going to ignore this result, we need only do something
6210 if there is a side-effect somewhere in the expression. If there
6211 is, short-circuit the most common cases here. Note that we must
6212 not call expand_expr with anything but const0_rtx in case this
6213 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6217 if (! TREE_SIDE_EFFECTS (exp))
6220 /* Ensure we reference a volatile object even if value is ignored, but
6221 don't do this if all we are doing is taking its address. */
6222 if (TREE_THIS_VOLATILE (exp)
6223 && TREE_CODE (exp) != FUNCTION_DECL
6224 && mode != VOIDmode && mode != BLKmode
6225 && modifier != EXPAND_CONST_ADDRESS)
6227 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6228 if (GET_CODE (temp) == MEM)
6229 temp = copy_to_reg (temp);
6233 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6234 || code == INDIRECT_REF || code == BUFFER_REF)
6235 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6236 VOIDmode, ro_modifier);
6237 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6238 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6240 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6242 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6246 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6247 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6248 /* If the second operand has no side effects, just evaluate
6249 the first. */
6250 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6251 VOIDmode, ro_modifier);
6252 else if (code == BIT_FIELD_REF)
6254 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6256 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6258 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6266 #ifdef MAX_INTEGER_COMPUTATION_MODE
6267 /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
6269 will handle it. Do we really need to check this stuff at all? */
6272 && GET_MODE (target) != mode
6273 && TREE_CODE (exp) != INTEGER_CST
6274 && TREE_CODE (exp) != PARM_DECL
6275 && TREE_CODE (exp) != ARRAY_REF
6276 && TREE_CODE (exp) != ARRAY_RANGE_REF
6277 && TREE_CODE (exp) != COMPONENT_REF
6278 && TREE_CODE (exp) != BIT_FIELD_REF
6279 && TREE_CODE (exp) != INDIRECT_REF
6280 && TREE_CODE (exp) != CALL_EXPR
6281 && TREE_CODE (exp) != VAR_DECL
6282 && TREE_CODE (exp) != RTL_EXPR)
6284 enum machine_mode mode = GET_MODE (target);
6286 if (GET_MODE_CLASS (mode) == MODE_INT
6287 && mode > MAX_INTEGER_COMPUTATION_MODE)
6288 internal_error ("unsupported wide integer operation");
6292 && TREE_CODE (exp) != INTEGER_CST
6293 && TREE_CODE (exp) != PARM_DECL
6294 && TREE_CODE (exp) != ARRAY_REF
6295 && TREE_CODE (exp) != ARRAY_RANGE_REF
6296 && TREE_CODE (exp) != COMPONENT_REF
6297 && TREE_CODE (exp) != BIT_FIELD_REF
6298 && TREE_CODE (exp) != INDIRECT_REF
6299 && TREE_CODE (exp) != VAR_DECL
6300 && TREE_CODE (exp) != CALL_EXPR
6301 && TREE_CODE (exp) != RTL_EXPR
6302 && GET_MODE_CLASS (tmode) == MODE_INT
6303 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6304 internal_error ("unsupported wide integer operation");
  if (TREE_CODE (exp) == INTEGER_CST)
    check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers,
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */
6314 if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
6322 tree function = decl_function_context (exp);
6323 /* Handle using a label in a containing function. */
6324 if (function != current_function_decl
6325 && function != inline_function_decl && function != 0)
6327 struct function *p = find_function_data (function);
6328 p->expr->x_forced_labels
6329 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6330 p->expr->x_forced_labels);
6334 if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                             label_rtx (exp),
                                             forced_labels);
6340 temp = gen_rtx_MEM (FUNCTION_MODE,
6341 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6342 if (function != current_function_decl
6343 && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
6349 if (DECL_RTL (exp) == 0)
6351 error_with_decl (exp, "prior parameter's size depends on `%s'");
6352 return CONST0_RTX (mode);
      /* ... fall through ...  */

    case VAR_DECL:
6358 /* If a static var's type was incomplete when the decl was written,
6359 but the type is complete now, lay out the decl now. */
6360 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6361 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6363 layout_decl (exp, 0);
6364 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6367 /* Although static-storage variables start off initialized, according to
6368 ANSI C, a memcpy could overwrite them with uninitialized values. So
6369 we check them too. This also lets us check for read-only variables
6370 accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel
         without memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
6375 if (cfun && current_function_check_memory_usage
6377 && GET_CODE (DECL_RTL (exp)) == MEM
6378 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6380 enum memory_use_mode memory_usage;
6381 memory_usage = get_memory_usage_from_modifier (modifier);
6383 in_check_memory_usage = 1;
6384 if (memory_usage != MEMORY_USE_DONT)
6385 emit_library_call (chkr_check_addr_libfunc,
6386 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6387 XEXP (DECL_RTL (exp), 0), Pmode,
6388 GEN_INT (int_size_in_bytes (type)),
6389 TYPE_MODE (sizetype),
6390 GEN_INT (memory_usage),
6391 TYPE_MODE (integer_type_node));
6392 in_check_memory_usage = 0;
      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();
      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
6405 if (! TREE_USED (exp))
6407 assemble_external (exp);
6408 TREE_USED (exp) = 1;
6411 /* Show we haven't gotten RTL for this yet. */
6414 /* Handle variables inherited from containing functions. */
6415 context = decl_function_context (exp);
6417 /* We treat inline_function_decl as an alias for the current function
6418 because that is the inline function whose vars, types, etc.
6419 are being merged into the current function.
6420 See expand_inline_function. */
6422 if (context != 0 && context != current_function_decl
6423 && context != inline_function_decl
6424 /* If var is static, we don't need a static chain to access it. */
6425 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6426 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6430 /* Mark as non-local and addressable. */
6431 DECL_NONLOCAL (exp) = 1;
6432 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6434 mark_addressable (exp);
6435 if (GET_CODE (DECL_RTL (exp)) != MEM)
6437 addr = XEXP (DECL_RTL (exp), 0);
6438 if (GET_CODE (addr) == MEM)
6440 = replace_equiv_address (addr,
6441 fix_lexical_addr (XEXP (addr, 0), exp));
6443 addr = fix_lexical_addr (addr, exp);
6445 temp = replace_equiv_address (DECL_RTL (exp), addr);
6448 /* This is the case of an array whose size is to be determined
6449 from its initializer, while the initializer is still being parsed.
6452 else if (GET_CODE (DECL_RTL (exp)) == MEM
6453 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6454 temp = validize_mem (DECL_RTL (exp));
6456 /* If DECL_RTL is memory, we are in the normal case and either
6457 the address is not valid or it is not a register and -fforce-addr
6458 is specified, get the address into a register. */
6460 else if (GET_CODE (DECL_RTL (exp)) == MEM
6461 && modifier != EXPAND_CONST_ADDRESS
6462 && modifier != EXPAND_SUM
6463 && modifier != EXPAND_INITIALIZER
6464 && (! memory_address_p (DECL_MODE (exp),
6465 XEXP (DECL_RTL (exp), 0))
6467 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6468 temp = replace_equiv_address (DECL_RTL (exp),
6469 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6471 /* If we got something, return it. But first, set the alignment
6472 if the address is a register. */
6475 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6476 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6481 /* If the mode of DECL_RTL does not match that of the decl, it
6482 must be a promoted value. We return a SUBREG of the wanted mode,
6483 but mark it so that we know that it was already extended. */
6485 if (GET_CODE (DECL_RTL (exp)) == REG
6486 && GET_MODE (DECL_RTL (exp)) != mode)
6488 /* Get the signedness used for this variable. Ensure we get the
6489 same mode we got when the variable was declared. */
6490 if (GET_MODE (DECL_RTL (exp))
6491 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6494 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6495 SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }
6500 return DECL_RTL (exp);
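      /* Illustration of the promoted case above (assuming a hypothetical
         64-bit target that keeps SImode values extended in DImode
         registers): a variable of mode SImode whose DECL_RTL is
         (reg:DI 42) is returned as

           (subreg:SI (reg:DI 42) 0)

         with SUBREG_PROMOTED_VAR_P set, so later code knows the value is
         already extended and can skip a redundant extension.  */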
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
6512 which will be turned into memory by reload if necessary.
6514 We used to force a register so that loop.c could see it. But
6515 this does not allow gen_* patterns to perform optimizations with
6516 the constants. It also produces two insns in cases like "x = 1.0;".
6517 On most machines, floating-point constants are not permitted in
6518 many insns, so we'd end up copying it to a register in any case.
6520 Now, we do the copying in expand_binop, if appropriate. */
6521 return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
6526 output_constant_def (exp, 1);
6528 /* TREE_CST_RTL probably contains a constant address.
6529 On RISC machines where a constant address isn't valid,
6530 make some insns to get that address into a register. */
6531 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6532 && modifier != EXPAND_CONST_ADDRESS
6533 && modifier != EXPAND_INITIALIZER
6534 && modifier != EXPAND_SUM
6535 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6537 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6538 return replace_equiv_address (TREE_CST_RTL (exp),
6539 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6540 return TREE_CST_RTL (exp);
6542 case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
6546 int saved_lineno = lineno;
6547 input_filename = EXPR_WFL_FILENAME (exp);
6548 lineno = EXPR_WFL_LINENO (exp);
6549 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6550 emit_line_note (input_filename, lineno);
6551 /* Possibly avoid switching back and forth here. */
6552 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6553 input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }

    case SAVE_EXPR:
6559 context = decl_function_context (exp);
6561 /* If this SAVE_EXPR was at global context, assume we are an
6562 initialization function and move it into our context. */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6566 /* We treat inline_function_decl as an alias for the current function
6567 because that is the inline function whose vars, types, etc.
6568 are being merged into the current function.
6569 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
6576 /* The following call just exists to abort if the context is
6577 not of a containing function. */
6578 find_function_data (context);
6580 temp = SAVE_EXPR_RTL (exp);
6581 if (temp && GET_CODE (temp) == REG)
6583 put_var_into_stack (exp);
6584 temp = SAVE_EXPR_RTL (exp);
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
                                   fix_lexical_addr (XEXP (temp, 0), exp));
        }
6592 if (SAVE_EXPR_RTL (exp) == 0)
6594 if (mode == VOIDmode)
6597 temp = assign_temp (build_qualified_type (type,
6599 | TYPE_QUAL_CONST)),
6602 SAVE_EXPR_RTL (exp) = temp;
6603 if (!optimize && GET_CODE (temp) == REG)
6604 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6607 /* If the mode of TEMP does not match that of the expression, it
6608 must be a promoted value. We pass store_expr a SUBREG of the
6609 wanted mode but mark it so that we know that it was already
6610 extended. Note that `unsignedp' was modified above in
6613 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6615 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6616 SUBREG_PROMOTED_VAR_P (temp) = 1;
6617 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6620 if (temp == const0_rtx)
6621 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6622 EXPAND_MEMORY_USE_BAD);
6624 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6626 TREE_USED (exp) = 1;
6629 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6630 must be a promoted value. We return a SUBREG of the wanted mode,
6631 but mark it so that we know that it was already extended. */
6633 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6634 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6636 /* Compute the signedness and make the proper SUBREG. */
6637 promote_mode (type, mode, &unsignedp, 0);
6638 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6639 SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }
6644 return SAVE_EXPR_RTL (exp);
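      /* Illustration of SAVE_EXPR handling above: for a source expression
         along the lines of

           tmp = f ();  use (tmp + tmp);

         the front end may wrap `f ()' in a SAVE_EXPR; the first expansion
         computes it into SAVE_EXPR_RTL, and every later reference reuses
         that rtx instead of re-evaluating the call.  (A hypothetical
         example of how SAVE_EXPRs typically arise.)  */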
6649 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6650 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
    case PLACEHOLDER_EXPR:
      {
6656 tree old_list = placeholder_list;
6657 tree placeholder_expr = 0;
        exp = find_placeholder (exp, &placeholder_expr);
        if (exp != 0)
          {
            placeholder_list = TREE_CHAIN (placeholder_expr);
            temp = expand_expr (exp, original_target, tmode, ro_modifier);
            placeholder_list = old_list;
            return temp;
          }
      }
      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
6672 case WITH_RECORD_EXPR:
6673 /* Put the object on the placeholder list, expand our first operand,
6674 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
6694 case LABELED_BLOCK_EXPR:
6695 if (LABELED_BLOCK_BODY (exp))
6696 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6697 /* Should perhaps use expand_label, but this is simpler and safer. */
6698 do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;
6702 case EXIT_BLOCK_EXPR:
6703 if (EXIT_BLOCK_RETURN (exp))
6704 sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();
      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
6720 int vars_need_expansion = 0;
6722 /* Need to open a binding contour here because
6723 if there are any cleanups they must be contained here. */
6724 expand_start_bindings (2);
6726 /* Mark the corresponding BLOCK for output in its proper place. */
6727 if (TREE_OPERAND (exp, 2) != 0
6728 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6729 insert_block (TREE_OPERAND (exp, 2));
6731 /* If VARS have not yet been expanded, expand them now. */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }
6743 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
6758 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6759 free_temps_for_rtl_expr (exp);
6760 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }
6774 /* All elts simple constants => refer to a constant in memory. But
6775 if this is a non-BLKmode mode, let it store a field at a time
6776 since that should make a CONST_INT or CONST_DOUBLE when we
6777 fold. Likewise, if we have a target we can use, it is best to
6778 store directly into the target unless the type is large enough
6779 that memcpy will be used. If we are making an initializer and
6780 all operands are constant, put it in memory as well. */
6781 else if ((TREE_STATIC (exp)
6782 && ((mode == BLKmode
6783 && ! (target != 0 && safe_from_p (target, exp, 1)))
6784 || TREE_ADDRESSABLE (exp)
6785 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6786 && (! MOVE_BY_PIECES_P
6787 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6789 && ! mostly_zeros_p (exp))))
6790 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6792 rtx constructor = output_constant_def (exp, 1);
6794 if (modifier != EXPAND_CONST_ADDRESS
6795 && modifier != EXPAND_INITIALIZER
6796 && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
6803 /* Handle calls that pass values in multiple non-contiguous
6804 locations. The Irix 6 ABI has examples of this. */
6805 if (target == 0 || ! safe_from_p (target, exp, 1)
6806 || GET_CODE (target) == PARALLEL)
6808 = assign_temp (build_qualified_type (type,
6810 | (TREE_READONLY (exp)
6811 * TYPE_QUAL_CONST))),
6812 TREE_ADDRESSABLE (exp), 1, 1);
6814 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
                             int_size_in_bytes (TREE_TYPE (exp)));
          return target;
        }
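      /* Illustration: given

           struct point { int x, y; } p = { 1, 2 };

         a non-static, non-addressable initializer like this is normally
         stored field by field via store_constructor, while a static
         all-constant aggregate is emitted into memory once by
         output_constant_def and referenced from there.  (A hypothetical
         example summarizing the heuristic above.)  */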
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);
6825 /* Try to optimize reads from const strings. */
        if (string
            && TREE_CODE (string) == STRING_CST
6828 && TREE_CODE (index) == INTEGER_CST
6829 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6830 && GET_MODE_CLASS (mode) == MODE_INT
6831 && GET_MODE_SIZE (mode) == 1
6832 && modifier != EXPAND_MEMORY_USE_WO)
          return
            GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
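        /* Illustration: a read such as *("abc" + 1) satisfies all of the
           tests above, so it is folded at expansion time to the constant
           (const_int 98), the value of 'b' -- no memory reference is
           emitted at all.  */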
6836 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6837 op0 = memory_address (mode, op0);
6839 if (cfun && current_function_check_memory_usage
6840 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6842 enum memory_use_mode memory_usage;
6843 memory_usage = get_memory_usage_from_modifier (modifier);
6845 if (memory_usage != MEMORY_USE_DONT)
6847 in_check_memory_usage = 1;
6848 emit_library_call (chkr_check_addr_libfunc,
6849 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6850 Pmode, GEN_INT (int_size_in_bytes (type)),
6851 TYPE_MODE (sizetype),
6852 GEN_INT (memory_usage),
6853 TYPE_MODE (integer_type_node));
6854 in_check_memory_usage = 0;
6858 temp = gen_rtx_MEM (mode, op0);
6859 set_mem_attributes (temp, exp, 0);
6861 /* If we are writing to this object and its type is a record with
6862 readonly fields, we must mark it as readonly so it will
6863 conflict with readonly references to those fields. */
6864 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
6876 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6877 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6878 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6881 /* Optimize the special-case of a zero lower bound.
6883 We convert the low_bound to sizetype to avoid some problems
6884 with constant folding. (E.g. suppose the lower bound is 1,
6885 and its mode is QI. Without the conversion, (ARRAY
6886 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6887 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6889 if (! integer_zerop (low_bound))
6890 index = size_diffop (index, convert (sizetype, low_bound));
6892 /* Fold an expression like: "foo"[2].
6893 This is not done in fold so it won't happen inside &.
6894 Don't fold if this is for wide characters since it's too
6895 difficult to do correctly and this is a very rare case. */
6897 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6898 && TREE_CODE (array) == STRING_CST
6899 && TREE_CODE (index) == INTEGER_CST
6900 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6901 && GET_MODE_CLASS (mode) == MODE_INT
6902 && GET_MODE_SIZE (mode) == 1)
          return
            GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6906 /* If this is a constant index into a constant array,
6907 just get the value from the array. Handle both the cases when
6908 we have an explicit constructor and when our operand is a variable
6909 that was declared const. */
6911 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6912 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6913 && TREE_CODE (index) == INTEGER_CST
6914 && 0 > compare_tree_int (index,
6915 list_length (CONSTRUCTOR_ELTS
6916 (TREE_OPERAND (exp, 0)))))
          {
            tree elem;
            unsigned HOST_WIDE_INT i;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                   i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target,
                                  tmode, ro_modifier);
          }
6930 else if (optimize >= 1
6931 && modifier != EXPAND_CONST_ADDRESS
6932 && modifier != EXPAND_INITIALIZER
6933 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6934 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6935 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6937 if (TREE_CODE (index) == INTEGER_CST)
6939 tree init = DECL_INITIAL (array);
6941 if (TREE_CODE (init) == CONSTRUCTOR)
6945 for (elem = CONSTRUCTOR_ELTS (init);
6947 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6948 elem = TREE_CHAIN (elem))
6951 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6952 return expand_expr (fold (TREE_VALUE (elem)), target,
6953 tmode, ro_modifier);
6955 else if (TREE_CODE (init) == STRING_CST
6956 && 0 > compare_tree_int (index,
6957 TREE_STRING_LENGTH (init)))
6959 tree type = TREE_TYPE (TREE_TYPE (init));
6960 enum machine_mode mode = TYPE_MODE (type);
6962 if (GET_MODE_CLASS (mode) == MODE_INT
6963 && GET_MODE_SIZE (mode) == 1)
                      return (GEN_INT
                              (TREE_STRING_POINTER
                               (init)[TREE_INT_CST_LOW (index)]));
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
6976 /* If the operand is a CONSTRUCTOR, we can just extract the
6977 appropriate field if it is present. Don't do this if we have
6978 already written the data since we want to refer to that copy
6979 and varasm.c assumes that's what we'll do. */
6980 if (code == COMPONENT_REF
6981 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6982 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6986 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6987 elt = TREE_CHAIN (elt))
6988 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6989 /* We can normally use the value of the field in the
6990 CONSTRUCTOR. However, if this is a bitfield in
6991 an integral mode that we can fit in a HOST_WIDE_INT,
6992 we must mask only the number of bits in the bitfield,
6993 since this is done implicitly by the constructor. If
6994 the bitfield does not meet either of those conditions,
6995 we can't do this optimization. */
6996 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6997 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6999 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7000 <= HOST_BITS_PER_WIDE_INT))))
7002 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7003 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7005 HOST_WIDE_INT bitsize
7006 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7008 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                  {
                    op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                    op0 = expand_and (op0, op1, target);
                  }
                else
                  {
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                    tree count
                      = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);

                    op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                        target, 0);
                    op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                        target, 0);
                  }
              }

            return op0;
          }
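            /* Worked example: extracting a 3-bit field from a constructor
               value.  For an unsigned field the mask is
               ((HOST_WIDE_INT) 1 << 3) - 1 == 7, so `op0 & 7' keeps just
               the low three bits.  For a signed field in SImode the value
               is shifted left by 32 - 3 = 29 bits and then arithmetically
               right by 29, which replicates the sign bit through the
               upper bits (assuming a 32-bit SImode).  */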
7033 enum machine_mode mode1;
7034 HOST_WIDE_INT bitsize, bitpos;
7037 unsigned int alignment;
7038 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7039 &mode1, &unsignedp, &volatilep,
7043 /* If we got back the original object, something is wrong. Perhaps
7044 we are evaluating an expression too early. In any event, don't
7045 infinitely recurse. */
7049 /* If TEM's type is a union of variable size, pass TARGET to the inner
7050 computation, since it will need a temporary and TARGET is known
           to suffice.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS)
                         ? modifier : EXPAND_NORMAL);
7064 /* If this is a constant, put it into a register if it is a
7065 legitimate constant and OFFSET is 0 and memory if it isn't. */
7066 if (CONSTANT_P (op0))
7068 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7069 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7071 op0 = force_reg (mode, op0);
7073 op0 = validize_mem (force_const_mem (mode, op0));
7078 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7080 /* If this object is in a register, put it into memory.
7081 This case can't occur in C, but can in Ada if we have
7082 unchecked conversion of an expression from a scalar type to
7083 an array or record type. */
7084 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7085 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7087 /* If the operand is a SAVE_EXPR, we can deal with this by
7088 forcing the SAVE_EXPR into memory. */
7089 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7091 put_var_into_stack (TREE_OPERAND (exp, 0));
7092 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7097 = build_qualified_type (TREE_TYPE (tem),
7098 (TYPE_QUALS (TREE_TYPE (tem))
7099 | TYPE_QUAL_CONST));
7100 rtx memloc = assign_temp (nt, 1, 1, 1);
7102 mark_temp_addr_taken (memloc);
7103 emit_move_insn (memloc, op0);
7108 if (GET_CODE (op0) != MEM)
7111 if (GET_MODE (offset_rtx) != ptr_mode)
7112 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7114 #ifdef POINTERS_EXTEND_UNSIGNED
7115 if (GET_MODE (offset_rtx) != Pmode)
7116 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7119 /* A constant address in OP0 can have VOIDmode, we must not try
7120 to call force_reg for that case. Avoid that case. */
7121 if (GET_CODE (op0) == MEM
7122 && GET_MODE (op0) == BLKmode
7123 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7125 && (bitpos % bitsize) == 0
7126 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7127 && alignment == GET_MODE_ALIGNMENT (mode1))
7129 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7131 if (GET_CODE (XEXP (temp, 0)) == REG)
7134 op0 = (replace_equiv_address
7136 force_reg (GET_MODE (XEXP (temp, 0)),
7141 op0 = offset_address (op0, offset_rtx,
7142 highest_pow2_factor (offset));
7145 /* Don't forget about volatility even if this is a bitfield. */
7146 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7148 if (op0 == orig_op0)
7149 op0 = copy_rtx (op0);
7151 MEM_VOLATILE_P (op0) = 1;
7154 /* Check the access. */
7155 if (cfun != 0 && current_function_check_memory_usage
7156 && GET_CODE (op0) == MEM)
7158 enum memory_use_mode memory_usage;
7159 memory_usage = get_memory_usage_from_modifier (modifier);
7161 if (memory_usage != MEMORY_USE_DONT)
7166 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7167 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7169 /* Check the access right of the pointer. */
7170 in_check_memory_usage = 1;
7171 if (size > BITS_PER_UNIT)
7172 emit_library_call (chkr_check_addr_libfunc,
7173 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7174 Pmode, GEN_INT (size / BITS_PER_UNIT),
7175 TYPE_MODE (sizetype),
7176 GEN_INT (memory_usage),
7177 TYPE_MODE (integer_type_node));
7178 in_check_memory_usage = 0;
7182 /* In cases where an aligned union has an unaligned object
7183 as a field, we might be extracting a BLKmode value from
7184 an integer-mode (e.g., SImode) object. Handle this case
7185 by doing the extract into an object as wide as the field
7186 (which we know to be the width of a basic mode), then
7187 storing into memory, and changing the mode to BLKmode. */
7188 if (mode1 == VOIDmode
7189 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7190 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7191 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7192 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7193 && modifier != EXPAND_CONST_ADDRESS
7194 && modifier != EXPAND_INITIALIZER)
7195 /* If the field isn't aligned enough to fetch as a memref,
7196 fetch it as a bit field. */
7197 || (mode1 != BLKmode
7198 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7199 && ((TYPE_ALIGN (TREE_TYPE (tem))
7200 < GET_MODE_ALIGNMENT (mode))
7201 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7202 /* If the type and the field are a constant size and the
7203 size of the type isn't the same size as the bitfield,
7204 we must use bitfield operations. */
7206 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7208 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7211 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7212 && (TYPE_ALIGN (type) > alignment
7213 || bitpos % TYPE_ALIGN (type) != 0)))
7215 enum machine_mode ext_mode = mode;
7217 if (ext_mode == BLKmode
7218 && ! (target != 0 && GET_CODE (op0) == MEM
7219 && GET_CODE (target) == MEM
7220 && bitpos % BITS_PER_UNIT == 0))
7221 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7223 if (ext_mode == BLKmode)
7225 /* In this case, BITPOS must start at a byte boundary and
7226 TARGET, if specified, must be a MEM. */
7227 if (GET_CODE (op0) != MEM
7228 || (target != 0 && GET_CODE (target) != MEM)
7229 || bitpos % BITS_PER_UNIT != 0)
7232 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7234 target = assign_temp (type, 0, 1, 1);
7236 emit_block_move (target, op0,
7237 bitsize == -1 ? expr_size (exp)
7238 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7244 op0 = validize_mem (op0);
7246 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7247 mark_reg_pointer (XEXP (op0, 0), alignment);
7249 op0 = extract_bit_field (op0, bitsize, bitpos,
7250 unsignedp, target, ext_mode, ext_mode,
7252 int_size_in_bytes (TREE_TYPE (tem)));
7254 /* If the result is a record type and BITSIZE is narrower than
7255 the mode of OP0, an integral mode, and this is a big endian
7256 machine, we must put the field into the high-order bits. */
7257 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7258 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7259 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7260 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7261 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7265 if (mode == BLKmode)
7267 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7269 rtx new = assign_temp (nt, 0, 1, 1);
7271 emit_move_insn (new, op0);
7272 op0 = copy_rtx (new);
7273 PUT_MODE (op0, BLKmode);
        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;
7284 /* Get a reference to just this component. */
7285 if (modifier == EXPAND_CONST_ADDRESS
7286 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7287 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7289 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7291 if (op0 == orig_op0)
7292 op0 = copy_rtx (op0);
7294 set_mem_attributes (op0, exp, 0);
7295 if (GET_CODE (XEXP (op0, 0)) == REG)
7296 mark_reg_pointer (XEXP (op0, 0), alignment);
7298 MEM_VOLATILE_P (op0) |= volatilep;
7299 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7300 || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }

    case VTABLE_REF:
      {
        rtx insn, before = get_last_insn (), vtbl_ref;
7314 /* Evaluate the interior expression. */
7315 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7318 /* Get or create an instruction off which to hang a note. */
7319 if (REG_P (subtarget))
7322 insn = get_last_insn ();
7325 if (! INSN_P (insn))
7326 insn = prev_nonnote_insn (insn);
7330 target = gen_reg_rtx (GET_MODE (subtarget));
7331 insn = emit_move_insn (target, subtarget);
7334 /* Collect the data for the note. */
7335 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7336 vtbl_ref = plus_constant (vtbl_ref,
7337 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7338 /* Discard the initial CONST that was added. */
7339 vtbl_ref = XEXP (vtbl_ref, 0);
        REG_NOTES (insn)
          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
        return target;
      }
7347 /* Intended for a reference to a buffer of a file-object in Pascal.
7348 But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low % bits_per_word);
               the_word  = set [(index - rlo) / bits_per_word];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
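        /* A minimal C sketch of the same test, assuming BITS_PER_UNIT == 8,
           a non-negative lower bound, and a set stored as an array of
           bytes (a hypothetical helper, not part of the compiler):

             static int
             set_contains (const unsigned char *set, int set_low, int index)
             {
               int rlo = set_low - (set_low % 8);
               unsigned char the_word = set[(index - rlo) / 8];
               return (the_word >> (index % 8)) & 1;
             }

           Bit zero of the first byte corresponds to element RLO.  */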
7364 tree set = TREE_OPERAND (exp, 0);
7365 tree index = TREE_OPERAND (exp, 1);
7366 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7367 tree set_type = TREE_TYPE (set);
7368 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7369 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7370 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7371 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7372 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7373 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7374 rtx setaddr = XEXP (setval, 0);
7375 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7377 rtx diff, quo, rem, addr, bit, result;
7379 /* If domain is empty, answer is no. Likewise if index is constant
7380 and out of bounds. */
7381 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7382 && TREE_CODE (set_low_bound) == INTEGER_CST
7383 && tree_int_cst_lt (set_high_bound, set_low_bound))
7384 || (TREE_CODE (index) == INTEGER_CST
7385 && TREE_CODE (set_low_bound) == INTEGER_CST
7386 && tree_int_cst_lt (index, set_low_bound))
7387 || (TREE_CODE (set_high_bound) == INTEGER_CST
7388 && TREE_CODE (index) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7395 /* If we get here, we have to generate the code for both cases
7396 (in range and out of range). */
7398 op0 = gen_label_rtx ();
7399 op1 = gen_label_rtx ();
7401 if (! (GET_CODE (index_val) == CONST_INT
7402 && GET_CODE (lo_r) == CONST_INT))
7404 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7405 GET_MODE (index_val), iunsignedp, 0, op1);
7408 if (! (GET_CODE (index_val) == CONST_INT
7409 && GET_CODE (hi_r) == CONST_INT))
7411 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7412 GET_MODE (index_val), iunsignedp, 0, op1);
        /* Calculate the element number of bit zero in the first word
           of the set.  */
7417 if (GET_CODE (lo_r) == CONST_INT)
7418 rlow = GEN_INT (INTVAL (lo_r)
7419 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7421 rlow = expand_binop (index_mode, and_optab, lo_r,
7422 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7423 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7425 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7426 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7428 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7429 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7430 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7431 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7433 addr = memory_address (byte_mode,
7434 expand_binop (index_mode, add_optab, diff,
7435 setaddr, NULL_RTX, iunsignedp,
7438 /* Extract the bit we want to examine. */
7439 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7440 gen_rtx_MEM (byte_mode, addr),
7441 make_tree (TREE_TYPE (index), rem),
7443 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7444 GET_MODE (target) == byte_mode ? target : 0,
7445 1, OPTAB_LIB_WIDEN);
7447 if (result != target)
7448 convert_move (target, result, 1);
7450 /* Output the code to handle the out-of-range case. */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
7458 case WITH_CLEANUP_EXPR:
7459 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);
7470 case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
7474 expand_start_bindings (2);
7476 target_temp_slot_level = temp_slot_level;
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7479 /* If we're going to use this value, load it up now. */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
7489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
7494 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7495 == BUILT_IN_FRONTEND)
7496 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;
7510 if (TREE_CODE (type) == UNION_TYPE)
7512 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7514 /* If both input and output are BLKmode, this conversion
7515 isn't actually doing anything unless we need to make the
7516 alignment stricter. */
7517 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7518 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7519 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
            return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                ro_modifier);

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);
7526 if (GET_CODE (target) == MEM)
7527 /* Store data into beginning of memory target. */
7528 store_expr (TREE_OPERAND (exp, 0),
7529 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7531 else if (GET_CODE (target) == REG)
7532 /* Store this field into a union of the proper type. */
7533 store_field (target,
7534 MIN ((int_size_in_bytes (TREE_TYPE
7535 (TREE_OPERAND (exp, 0)))
7537 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7538 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7539 VOIDmode, 0, BITS_PER_UNIT,
                         int_size_in_bytes (type), 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
7548 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
7554 a promoted SUBREG, clear that indication since we now
7555 have to do the proper extension. */
7556 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7557 && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7564 if (GET_MODE (op0) == mode)
7567 /* If OP0 is a constant, just convert it into the proper mode. */
7568 if (CONSTANT_P (op0))
7570 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7571 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7573 if (modifier == EXPAND_INITIALIZER)
7574 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
7590 && (GET_MODE_CLASS(mode) == MODE_INT)
7591 ? addv_optab : add_optab;
7593 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7594 something else, make sure we add the register to the constant and
7595 then to the other thing. This case can occur during strength
7596 reduction and doing it this way will produce better code if the
7597 frame pointer or argument pointer is eliminated.
7599 fold-const.c will ensure that the constant is always in the inner
7600 PLUS_EXPR, so the only case we need to do anything about is if
7601 sp, ap, or fp is our second argument, in which case we must swap
7602 the innermost first argument and our second argument. */
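      /* Illustration: for EXP = (X + 4) + FP the operands are swapped to
         (FP + 4) + X, so that once the frame pointer is eliminated to a
         hard register plus offset, FP + 4 folds into a single constant
         displacement.  (A hypothetical shape of the tree, shown infix.)  */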
7604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7605 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7606 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7607 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7608 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7609 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7611 tree t = TREE_OPERAND (exp, 1);
7613 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7614 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7617 /* If the result is to be ptr_mode and we are adding an integer to
7618 something, we might be forming a constant. So try to use
7619 plus_constant. If it produces a sum and we can't accept it,
7620 use force_operand. This allows P = &ARR[const] to generate
7621 efficient code on machines where a SYMBOL_REF is not a valid
7624 If this is an EXPAND_SUM call, always return the sum. */
7625 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7626 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7628 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7629 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7630 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
7636 /* Use immed_double_const to ensure that the constant is
7637 truncated according to the mode of OP1, then sign extended
7638 to a HOST_WIDE_INT. Using the constant directly can result
7639 in non-canonical RTL in a 64x32 cross compile. */
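              /* Illustration: on a 64-bit host targeting 32-bit SImode, a
                 tree constant with low part 0xffffffff must not become
                 (const_int 0xffffffff); immed_double_const truncates it to
                 SImode and sign-extends, yielding the canonical
                 (const_int -1).  */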
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7644 op1 = plus_constant (op1, INTVAL (constant_part));
7645 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }
7650 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7651 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7652 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
7660 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7661 VOIDmode, modifier);
7662 /* Don't go to both_summands if modifier
7663 says it's not right to return a PLUS. */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
7668 /* Use immed_double_const to ensure that the constant is
7669 truncated according to the mode of OP1, then sign extended
7670 to a HOST_WIDE_INT. Using the constant directly can result
7671 in non-canonical RTL in a 64x32 cross compile. */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7676 op0 = plus_constant (op0, INTVAL (constant_part));
7677 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }
7683 /* No sense saving up arithmetic to be done
7684 if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
7687 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
7694 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
7699 if (GET_CODE (op0) == PLUS
7700 && CONSTANT_P (XEXP (op0, 1)))
7706 /* If adding to a sum including a constant,
7707 associate it to put the constant outside. */
7708 if (GET_CODE (op1) == PLUS
7709 && CONSTANT_P (XEXP (op1, 1)))
7711 rtx constant_term = const0_rtx;
7713 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7716 /* Ensure that MULT comes first if there is one. */
7717 else if (GET_CODE (op0) == MULT)
7718 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7720 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7722 /* Let's also eliminate constants from op0 if possible. */
7723 op0 = eliminate_constant_term (op0, &constant_term);
7725 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7726 their sum should be a constant. Form it into OP1, since the
7727 result we want will then be OP0 + OP1. */
7729 temp = simplify_binary_operation (PLUS, mode, constant_term,
7734 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7737 /* Put a constant term last and put a multiplication first. */
7738 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7739 temp = op1, op1 = op0, op0 = temp;
7741 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7742 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
7748 /* Handle difference of two symbolic constants,
7749 for the sake of an initializer. */
7750 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7751 && really_constant_p (TREE_OPERAND (exp, 0))
7752 && really_constant_p (TREE_OPERAND (exp, 1)))
7754 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7755 VOIDmode, ro_modifier);
7756 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7757 VOIDmode, ro_modifier);
7759 /* If the last operand is a CONST_INT, use plus_constant of
7760 the negated constant. Else make the MINUS. */
7761 if (GET_CODE (op1) == CONST_INT)
7762 return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
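      /* Illustration: in a static initializer such as

           static long d = (char *) &end - (char *) &start;

         both operands are symbolic constants, and the result is the
         assembler-time expression (minus (symbol_ref end)
         (symbol_ref start)).  (Hypothetical variable names.)  */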
7766 /* Convert A - const to A + (-const). */
7767 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7769 tree negated = fold (build1 (NEGATE_EXPR, type,
7770 TREE_OPERAND (exp, 1)));
7772 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7773 /* If we can't negate the constant in TYPE, leave it alone and
7774 expand_binop will negate it for us. We used to try to do it
7775 here in the signed version of TYPE, but that doesn't work
7776 on POINTER_TYPEs. */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
7783 this_optab = ! unsignedp && flag_trapv
7784 && (GET_MODE_CLASS(mode) == MODE_INT)
                   ? subv_optab : sub_optab;
      goto binop;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
7790 Thus the following special case checks need only
7791 check the second operand. */
7792 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7794 tree t1 = TREE_OPERAND (exp, 0);
7795 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7796 TREE_OPERAND (exp, 1) = t1;
7799 /* Attempt to return something suitable for generating an
7800 indexed address, for machines that support that. */
7802 if (modifier == EXPAND_SUM && mode == ptr_mode
7803 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7804 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);
7809 /* Apply distributive law if OP0 is x+c. */
7810 if (GET_CODE (op0) == PLUS
7811 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return
              gen_rtx_PLUS (mode,
                            gen_rtx_MULT (mode, XEXP (op0, 0),
7817 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7818 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7819 * INTVAL (XEXP (op0, 1))));
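          /* Illustration: if OP0 expanded to (plus (reg) (const_int 4))
             and we are multiplying by 8, the result above is

               (plus (mult (reg) (const_int 8)) (const_int 32))

             which is exactly the shape of an indexed address.  */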
7821 if (GET_CODE (op0) != REG)
7822 op0 = force_operand (op0, NULL_RTX);
7823 if (GET_CODE (op0) != REG)
7824 op0 = copy_to_mode_reg (mode, op0);
          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
7834 /* Check for multiplying things that have been extended
7835 from a narrower type. If this machine supports multiplying
7836 in that narrower type with a result in the desired type,
7837 do it that way, and avoid the explicit type-conversion. */
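      /* Illustration: with 32-bit ints and a 64-bit result type,

           (long long) a * (long long) b

         can use a single widening multiply pattern (smul_widen_optab,
         e.g. a mulsidi3 insn on machines that have one) instead of two
         extensions followed by a full DImode multiply.  */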
7838 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7839 && TREE_CODE (type) == INTEGER_TYPE
7840 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7841 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7842 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7843 && int_fits_type_p (TREE_OPERAND (exp, 1),
7844 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7845 /* Don't use a widening multiply if a shift will do. */
7846 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7847 > HOST_BITS_PER_WIDE_INT)
7848 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7850 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7851 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7853 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7854 /* If both operands are extended, they must either both
7855 be zero-extended or both be sign-extended. */
7856 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7858 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7860 enum machine_mode innermode
7861 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7862 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 ? smul_widen_optab : umul_widen_optab);
7864 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7865 ? umul_widen_optab : smul_widen_optab);
7866 if (mode == GET_MODE_WIDER_MODE (innermode))
7868 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7870 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7871 NULL_RTX, VOIDmode, 0);
7872 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7873 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7876 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7877 NULL_RTX, VOIDmode, 0);
7880 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7881 && innermode == word_mode)
7884 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7885 NULL_RTX, VOIDmode, 0);
7886 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7887 op1 = convert_modes (innermode, mode,
7888 expand_expr (TREE_OPERAND (exp, 1),
7889 NULL_RTX, VOIDmode, 0),
7892 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7893 NULL_RTX, VOIDmode, 0);
7894 temp = expand_binop (mode, other_optab, op0, op1, target,
7895 unsignedp, OPTAB_LIB_WIDEN);
7896 htem = expand_mult_highpart_adjust (innermode,
7897 gen_highpart (innermode, temp),
7899 gen_highpart (innermode, temp),
7901 emit_move_insn (gen_highpart (innermode, temp), htem);
7906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7907 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7908 return expand_mult (mode, op0, op1, target, unsignedp);
7910 case TRUNC_DIV_EXPR:
7911 case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
7914 case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
7917 /* Possible optimization: compute the dividend with EXPAND_SUM
7918 then if the divisor is constant can optimize the case
7919 where some terms of the dividend have coeffs divisible by it. */
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7922 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
         saving an expensive divide.  If not, combine will rebuild the
         original computation.  */
7928 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7929 && !real_onep (TREE_OPERAND (exp, 0)))
7930 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7931 build (RDIV_EXPR, type,
7932 build_real (type, dconst1),
7933 TREE_OPERAND (exp, 1))),
7934 target, tmode, unsignedp);
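      /* Illustration: under -funsafe-math-optimizations,

           a = x / d;  b = y / d;

         becomes t = 1/d; a = x*t; b = y*t once CSE shares the reciprocal,
         trading two divides for one divide and two multiplies.  The
         transformation can change rounding, which is why it is gated on
         the unsafe-math flag.  */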
      this_optab = sdiv_optab;
      goto binop2;
7938 case TRUNC_MOD_EXPR:
7939 case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
7944 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7945 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7946 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7948 case FIX_ROUND_EXPR:
7949 case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */
7953 case FIX_TRUNC_EXPR:
7954 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
7964 /* expand_float can't figure out what to do if FROM has VOIDmode.
7965 So give it the correct mode. With -O, cse will optimize this. */
7966 if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7975 temp = expand_unop (mode,
7976 ! unsignedp && flag_trapv
7977 && (GET_MODE_CLASS(mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7986 /* Handle complex values specially. */
7987 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7988 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7989 return expand_complex_abs (mode, op0, target, unsignedp);
7991 /* Unsigned abs is simply the operand. Testing here means we don't
7992 risk generating incorrect code below. */
      if (TREE_UNSIGNED (type))
        return op0;
7996 return expand_abs (mode, op0, target, unsignedp,
7997 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
8002 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8003 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8004 || GET_MODE (target) != mode
8005 || (GET_CODE (target) == REG
8006 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8007 target = gen_reg_rtx (mode);
8008 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8009 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8011 /* First try to do it with a special MIN or MAX instruction.
8012 If that does not win, use a conditional jump to select the proper
8014 this_optab = (TREE_UNSIGNED (type)
8015 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8016 : (code == MIN_EXPR ? smin_optab : smax_optab));
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
8026 if (GET_CODE (target) == MEM)
8027 target = gen_reg_rtx (mode);
8030 emit_move_insn (target, op0);
8032 op0 = gen_label_rtx ();
8034 /* If this mode is an integer too wide to compare properly,
8035 compare word by word. Rely on cse to optimize constant cases. */
8036 if (GET_MODE_CLASS (mode) == MODE_INT
8037 && ! can_compare_p (GE, mode, ccp_jump))
8039 if (code == MAX_EXPR)
8040 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8041 target, op1, NULL_RTX, op0);
8043 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8044 op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8049 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
                                   op0);
        }

      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
8071 /* ??? Can optimize bitwise operations with one arg constant.
8072 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8073 and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */
8076 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8077 boolean values when we want in all cases to compute both of them. In
8078 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8079 as actual zero-or-1 values and then bitwise anding. In cases where
8080 there cannot be any side effects, better code would be made by
8081 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8082 how to recognize those cases. */
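      /* Illustration: when A and B are known zero-or-one values and B has
         no side effects, `A && B' can be computed branch-free as the
         bitwise `A & B'; TRUTH_ANDIF_EXPR, by contrast, must skip B's
         evaluation entirely when A is zero.  */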
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
8105 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
8109 /* Could determine the answer when only additive constants differ. Also,
8110 the addition of one can be handled by changing the condition. */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
8128 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8129 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
8132 && (GET_MODE (original_target)
8133 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

8138 if (temp != original_target)
8139 temp = copy_to_reg (temp);
8141 op1 = gen_label_rtx ();
8142 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8143 GET_MODE (temp), unsignedp, 0, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
8149 /* If no set-flag instruction, must generate a conditional
8150 store into a temporary variable. Drop through
8151 and handle this like && and ||. */
8153 case TRUTH_ANDIF_EXPR:
8154 case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
8157 /* Make sure we don't have a hard reg (such as function's return
8158 value) live across basic blocks, if not optimizing. */
8159 || (!optimize && GET_CODE (target) == REG
8160 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8161 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
8175 case TRUTH_NOT_EXPR:
8176 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8177 /* The parser is careful to generate TRUTH_NOT_EXPR
8178 only with operands that are always zero or one. */
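/* Hence !X can be computed as X ^ 1, which maps 0 to 1 and 1 to 0. */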
8179 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8180 target, 1, OPTAB_LIB_WIDEN);
8186 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8188 return expand_expr (TREE_OPERAND (exp, 1),
8189 (ignore ? const0_rtx : target),
8193 /* If we would have a "singleton" (see below) were it not for a
8194 conversion in each arm, bring that conversion back out. */
8195 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8196 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8197 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8198 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8200 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8201 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8203 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8204 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8205 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8206 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8207 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8208 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8209 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8210 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8211 return expand_expr (build1 (NOP_EXPR, type,
8212 build (COND_EXPR, TREE_TYPE (iftrue),
8213 TREE_OPERAND (exp, 0),
8215 target, tmode, modifier);
8219 /* Note that COND_EXPRs whose type is a structure or union
8220 are required to be constructed to contain assignments of
8221 a temporary variable, so that we can evaluate them here
8222 for side effect only. If type is void, we must do likewise. */
8224 /* If an arm of the branch requires a cleanup,
8225 only that cleanup is performed. */
8228 tree binary_op = 0, unary_op = 0;
8230 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8231 convert it to our mode, if necessary. */
8232 if (integer_onep (TREE_OPERAND (exp, 1))
8233 && integer_zerop (TREE_OPERAND (exp, 2))
8234 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8238 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8244 if (GET_MODE (op0) == mode)
8248 target = gen_reg_rtx (mode);
8249 convert_move (target, op0, unsignedp);
8253 /* Check for X ? A + B : A. If we have this, we can copy A to the
8254 output and conditionally add B. Similarly for unary operations.
8255 Don't do this if X has side-effects because those side effects
8256 might affect A or B and the "?" operation is a sequence point in
8257 ANSI. (operand_equal_p tests for side effects.) */
8259 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8260 && operand_equal_p (TREE_OPERAND (exp, 2),
8261 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8262 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8263 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8264 && operand_equal_p (TREE_OPERAND (exp, 1),
8265 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8266 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8267 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8268 && operand_equal_p (TREE_OPERAND (exp, 2),
8269 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8270 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8271 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8272 && operand_equal_p (TREE_OPERAND (exp, 1),
8273 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8274 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8276 /* If we are not to produce a result, we have no target. Otherwise,
8277 if a target was specified use it; it will not be used as an
8278 intermediate target unless it is safe. If no target, use a temporary. */
8283 else if (original_target
8284 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8285 || (singleton && GET_CODE (original_target) == REG
8286 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8287 && original_target == var_rtx (singleton)))
8288 && GET_MODE (original_target) == mode
8289 #ifdef HAVE_conditional_move
8290 && (! can_conditionally_move_p (mode)
8291 || GET_CODE (original_target) == REG
8292 || TREE_ADDRESSABLE (type))
8294 && (GET_CODE (original_target) != MEM
8295 || TREE_ADDRESSABLE (type)))
8296 temp = original_target;
8297 else if (TREE_ADDRESSABLE (type))
8300 temp = assign_temp (type, 0, 0, 1);
8302 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8303 do the test of X as a store-flag operation, do this as
8304 A + ((X != 0) << log C). Similarly for other simple binary
8305 operators. Only do for C == 1 if BRANCH_COST is low. */
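/* A worked example, assuming BRANCH_COST >= 3: `x > y ? a + 4 : a'
 can be compiled as a + ((x > y) << 2), trading the conditional
 branch for a store-flag insn and a shift. */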
8306 if (temp && singleton && binary_op
8307 && (TREE_CODE (binary_op) == PLUS_EXPR
8308 || TREE_CODE (binary_op) == MINUS_EXPR
8309 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8310 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8311 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8312 : integer_onep (TREE_OPERAND (binary_op, 1)))
8313 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8316 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8317 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8318 ? addv_optab : add_optab)
8319 : TREE_CODE (binary_op) == MINUS_EXPR
8320 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8321 ? subv_optab : sub_optab)
8322 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8325 /* If we had X ? A : A + 1, do this as A + (X == 0).
8327 We have to invert the truth value here and then put it
8328 back later if do_store_flag fails. We cannot simply copy
8329 TREE_OPERAND (exp, 0) to another variable and modify that
8330 because invert_truthvalue can modify the tree pointed to by its argument. */
8332 if (singleton == TREE_OPERAND (exp, 1))
8333 TREE_OPERAND (exp, 0)
8334 = invert_truthvalue (TREE_OPERAND (exp, 0));
8336 result = do_store_flag (TREE_OPERAND (exp, 0),
8337 (safe_from_p (temp, singleton, 1)
8339 mode, BRANCH_COST <= 1);
8341 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8342 result = expand_shift (LSHIFT_EXPR, mode, result,
8343 build_int_2 (tree_log2
8347 (safe_from_p (temp, singleton, 1)
8348 ? temp : NULL_RTX), 0);
8352 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8353 return expand_binop (mode, boptab, op1, result, temp,
8354 unsignedp, OPTAB_LIB_WIDEN);
8356 else if (singleton == TREE_OPERAND (exp, 1))
8357 TREE_OPERAND (exp, 0)
8358 = invert_truthvalue (TREE_OPERAND (exp, 0));
8361 do_pending_stack_adjust ();
8363 op0 = gen_label_rtx ();
8365 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8369 /* If the target conflicts with the other operand of the
8370 binary op, we can't use it. Also, we can't use the target
8371 if it is a hard register, because evaluating the condition
8372 might clobber it. */
8374 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8375 || (GET_CODE (temp) == REG
8376 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8377 temp = gen_reg_rtx (mode);
8378 store_expr (singleton, temp, 0);
8381 expand_expr (singleton,
8382 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8383 if (singleton == TREE_OPERAND (exp, 1))
8384 jumpif (TREE_OPERAND (exp, 0), op0);
8386 jumpifnot (TREE_OPERAND (exp, 0), op0);
8388 start_cleanup_deferral ();
8389 if (binary_op && temp == 0)
8390 /* Just touch the other operand. */
8391 expand_expr (TREE_OPERAND (binary_op, 1),
8392 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8394 store_expr (build (TREE_CODE (binary_op), type,
8395 make_tree (type, temp),
8396 TREE_OPERAND (binary_op, 1)),
8399 store_expr (build1 (TREE_CODE (unary_op), type,
8400 make_tree (type, temp)),
8404 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8405 comparison operator. If we have one of these cases, set the
8406 output to A, branch on A (cse will merge these two references),
8407 then set the output to FOO. */
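/* For example, for `i > 0 ? i : j' we store I into the output, jump
 past the else-arm when `i > 0', and otherwise fall through to store J. */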
8409 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8410 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8411 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8412 TREE_OPERAND (exp, 1), 0)
8413 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8414 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8415 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8417 if (GET_CODE (temp) == REG
8418 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8419 temp = gen_reg_rtx (mode);
8420 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8421 jumpif (TREE_OPERAND (exp, 0), op0);
8423 start_cleanup_deferral ();
8424 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8428 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8429 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8430 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8431 TREE_OPERAND (exp, 2), 0)
8432 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8433 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8434 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8436 if (GET_CODE (temp) == REG
8437 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8438 temp = gen_reg_rtx (mode);
8439 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8440 jumpifnot (TREE_OPERAND (exp, 0), op0);
8442 start_cleanup_deferral ();
8443 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8448 op1 = gen_label_rtx ();
8449 jumpifnot (TREE_OPERAND (exp, 0), op0);
8451 start_cleanup_deferral ();
8453 /* One branch of the cond can be void, if it never returns. For
8454 example A ? throw : E */
8456 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8457 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8459 expand_expr (TREE_OPERAND (exp, 1),
8460 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8461 end_cleanup_deferral ();
8463 emit_jump_insn (gen_jump (op1));
8466 start_cleanup_deferral ();
8468 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8469 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8471 expand_expr (TREE_OPERAND (exp, 2),
8472 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8475 end_cleanup_deferral ();
8486 /* Something needs to be initialized, but we didn't know
8487 where that thing was when building the tree. For example,
8488 it could be the return value of a function, or a parameter
8489 to a function which is laid down in the stack, or a temporary
8490 variable which must be passed by reference.
8492 We guarantee that the expression will either be constructed
8493 or copied into our original target. */
8495 tree slot = TREE_OPERAND (exp, 0);
8496 tree cleanups = NULL_TREE;
8499 if (TREE_CODE (slot) != VAR_DECL)
8503 target = original_target;
8505 /* Set this here so that if we get a target that refers to a
8506 register variable that's already been used, put_reg_into_stack
8507 knows that it should fix up those uses. */
8508 TREE_USED (slot) = 1;
8512 if (DECL_RTL_SET_P (slot))
8514 target = DECL_RTL (slot);
8515 /* We have already expanded the slot, so don't do it again. */
8517 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8522 target = assign_temp (type, 2, 0, 1);
8523 /* All temp slots at this level must not conflict. */
8524 preserve_temp_slots (target);
8525 SET_DECL_RTL (slot, target);
8526 if (TREE_ADDRESSABLE (slot))
8527 put_var_into_stack (slot);
8529 /* Since SLOT is not known to the called function
8530 to belong to its stack frame, we must build an explicit
8531 cleanup. This case occurs when we must build up a reference
8532 to pass the reference as an argument. In this case,
8533 it is very likely that such a reference need not be built here. */
8536 if (TREE_OPERAND (exp, 2) == 0)
8537 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8538 cleanups = TREE_OPERAND (exp, 2);
8543 /* This case does occur when expanding a parameter which
8544 needs to be constructed on the stack. The target
8545 is the actual stack address that we want to initialize.
8546 The function we call will perform the cleanup in this case. */
8548 /* If we have already assigned it space, use that space,
8549 not the target that we were passed in, as our target
8550 parameter is only a hint. */
8551 if (DECL_RTL_SET_P (slot))
8553 target = DECL_RTL (slot);
8554 /* We have already expanded the slot, so don't do it again. */
8556 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8561 SET_DECL_RTL (slot, target);
8562 /* If we must have an addressable slot, then make sure that
8563 the RTL that we just stored in slot is OK. */
8564 if (TREE_ADDRESSABLE (slot))
8565 put_var_into_stack (slot);
8569 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8570 /* Mark it as expanded. */
8571 TREE_OPERAND (exp, 1) = NULL_TREE;
8573 store_expr (exp1, target, 0);
8575 expand_decl_cleanup (NULL_TREE, cleanups);
8582 tree lhs = TREE_OPERAND (exp, 0);
8583 tree rhs = TREE_OPERAND (exp, 1);
8585 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8591 /* If lhs is complex, expand calls in rhs before computing it.
8592 That's so we don't compute a pointer and save it over a
8593 call. If lhs is simple, compute it first so we can give it
8594 as a target if the rhs is just a call. This avoids an
8595 extra temp and copy and that prevents a partial-subsumption
8596 which makes bad code. Actually we could treat
8597 component_ref's of vars like vars. */
8599 tree lhs = TREE_OPERAND (exp, 0);
8600 tree rhs = TREE_OPERAND (exp, 1);
8604 /* Check for |= or &= of a bitfield of size one into another bitfield
8605 of size 1. In this case, (unless we need the result of the
8606 assignment) we can do this more efficiently with a
8607 test followed by an assignment, if necessary.
8609 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8610 things change so we do, this code should be enhanced to support it. */
8613 && TREE_CODE (lhs) == COMPONENT_REF
8614 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8615 || TREE_CODE (rhs) == BIT_AND_EXPR)
8616 && TREE_OPERAND (rhs, 0) == lhs
8617 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8618 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8619 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8621 rtx label = gen_label_rtx ();
8623 do_jump (TREE_OPERAND (rhs, 1),
8624 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8625 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8626 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8627 (TREE_CODE (rhs) == BIT_IOR_EXPR
8629 : integer_zero_node)),
8631 do_pending_stack_adjust ();
8636 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8642 if (!TREE_OPERAND (exp, 0))
8643 expand_null_return ();
8645 expand_return (TREE_OPERAND (exp, 0));
8648 case PREINCREMENT_EXPR:
8649 case PREDECREMENT_EXPR:
8650 return expand_increment (exp, 0, ignore);
8652 case POSTINCREMENT_EXPR:
8653 case POSTDECREMENT_EXPR:
8654 /* Faster to treat as pre-increment if result is not used. */
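/* E.g. a statement such as `i++;' needs no copy of the old value,
 so it is expanded exactly like `++i;'. */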
8655 return expand_increment (exp, ! ignore, ignore);
8658 /* If nonzero, TEMP will be set to the address of something that might
8659 be a MEM corresponding to a stack slot. */
8662 /* Are we taking the address of a nested function? */
8663 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8664 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8665 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8666 && ! TREE_STATIC (exp))
8668 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8669 op0 = force_operand (op0, target);
8671 /* If we are taking the address of something erroneous, just return a zero. */
8673 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8675 /* If we are taking the address of a constant and are at the
8676 top level, we have to use output_constant_def since we can't
8677 call force_const_mem at top level. */
8679 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8680 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8682 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8685 /* We make sure to pass const0_rtx down if we came in with
8686 ignore set, to avoid doing the cleanups twice for something. */
8687 op0 = expand_expr (TREE_OPERAND (exp, 0),
8688 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8689 (modifier == EXPAND_INITIALIZER
8690 ? modifier : EXPAND_CONST_ADDRESS));
8692 /* If we are going to ignore the result, OP0 will have been set
8693 to const0_rtx, so just return it. Don't get confused and
8694 think we are taking the address of the constant. */
8698 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8699 clever and return a REG when given a MEM. */
8700 op0 = protect_from_queue (op0, 1);
8702 /* We would like the object in memory. If it is a constant, we can
8703 have it be statically allocated into memory. For a non-constant,
8704 we need to allocate some memory and store the value into it. */
8706 if (CONSTANT_P (op0))
8707 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8709 else if (GET_CODE (op0) == MEM)
8711 mark_temp_addr_taken (op0);
8712 temp = XEXP (op0, 0);
8715 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8716 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8717 || GET_CODE (op0) == PARALLEL)
8719 /* If this object is in a register, it must be copied into a memory temporary whose address we can take. */
8721 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8722 tree nt = build_qualified_type (inner_type,
8723 (TYPE_QUALS (inner_type)
8724 | TYPE_QUAL_CONST));
8725 rtx memloc = assign_temp (nt, 1, 1, 1);
8727 mark_temp_addr_taken (memloc);
8728 if (GET_CODE (op0) == PARALLEL)
8729 /* Handle calls that pass values in multiple non-contiguous
8730 locations. The Irix 6 ABI has examples of this. */
8731 emit_group_store (memloc, op0,
8732 int_size_in_bytes (inner_type),
8733 TYPE_ALIGN (inner_type));
8735 emit_move_insn (memloc, op0);
8739 if (GET_CODE (op0) != MEM)
8742 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8744 temp = XEXP (op0, 0);
8745 #ifdef POINTERS_EXTEND_UNSIGNED
8746 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8747 && mode == ptr_mode)
8748 temp = convert_memory_address (ptr_mode, temp);
8753 op0 = force_operand (XEXP (op0, 0), target);
8756 if (flag_force_addr && GET_CODE (op0) != REG)
8757 op0 = force_reg (Pmode, op0);
8759 if (GET_CODE (op0) == REG
8760 && ! REG_USERVAR_P (op0))
8761 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8763 /* If we might have had a temp slot, add an equivalent address for it. */
8766 update_temp_slot_address (temp, op0);
8768 #ifdef POINTERS_EXTEND_UNSIGNED
8769 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8770 && mode == ptr_mode)
8771 op0 = convert_memory_address (ptr_mode, op0);
8776 case ENTRY_VALUE_EXPR:
8779 /* COMPLEX type for Extended Pascal & Fortran */
8782 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8785 /* Get the rtx code of the operands. */
8786 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8787 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8790 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8794 /* Move the real (op0) and imaginary (op1) parts to their location. */
8795 emit_move_insn (gen_realpart (mode, target), op0);
8796 emit_move_insn (gen_imagpart (mode, target), op1);
8798 insns = get_insns ();
8801 /* Complex construction should appear as a single unit. */
8802 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8803 each with a separate pseudo as destination.
8804 It's not correct for flow to treat them as a unit. */
8805 if (GET_CODE (target) != CONCAT)
8806 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8814 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8815 return gen_realpart (mode, op0);
8818 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8819 return gen_imagpart (mode, op0);
8823 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8827 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8830 target = gen_reg_rtx (mode);
8834 /* Store the realpart and the negated imagpart to target. */
8835 emit_move_insn (gen_realpart (partmode, target),
8836 gen_realpart (partmode, op0));
8838 imag_t = gen_imagpart (partmode, target);
8839 temp = expand_unop (partmode,
8840 ! unsignedp && flag_trapv
8841 && (GET_MODE_CLASS(partmode) == MODE_INT)
8842 ? negv_optab : neg_optab,
8843 gen_imagpart (partmode, op0), imag_t, 0);
8845 emit_move_insn (imag_t, temp);
8847 insns = get_insns ();
8850 /* Conjugate should appear as a single unit.
8851 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8852 each with a separate pseudo as destination.
8853 It's not correct for flow to treat them as a unit. */
8854 if (GET_CODE (target) != CONCAT)
8855 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8862 case TRY_CATCH_EXPR:
8864 tree handler = TREE_OPERAND (exp, 1);
8866 expand_eh_region_start ();
8868 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8870 expand_eh_region_end_cleanup (handler);
8875 case TRY_FINALLY_EXPR:
8877 tree try_block = TREE_OPERAND (exp, 0);
8878 tree finally_block = TREE_OPERAND (exp, 1);
8879 rtx finally_label = gen_label_rtx ();
8880 rtx done_label = gen_label_rtx ();
8881 rtx return_link = gen_reg_rtx (Pmode);
8882 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8883 (tree) finally_label, (tree) return_link);
8884 TREE_SIDE_EFFECTS (cleanup) = 1;
8886 /* Start a new binding layer that will keep track of all cleanup
8887 actions to be performed. */
8888 expand_start_bindings (2);
8890 target_temp_slot_level = temp_slot_level;
8892 expand_decl_cleanup (NULL_TREE, cleanup);
8893 op0 = expand_expr (try_block, target, tmode, modifier);
8895 preserve_temp_slots (op0);
8896 expand_end_bindings (NULL_TREE, 0, 0);
8897 emit_jump (done_label);
8898 emit_label (finally_label);
8899 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8900 emit_indirect_jump (return_link);
8901 emit_label (done_label);
8905 case GOTO_SUBROUTINE_EXPR:
8907 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8908 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8909 rtx return_address = gen_label_rtx ();
8910 emit_move_insn (return_link,
8911 gen_rtx_LABEL_REF (Pmode, return_address));
8913 emit_label (return_address);
8918 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8921 return get_exception_pointer (cfun);
8924 /* Function descriptors are not valid except as
8925 initialization constants, and should not be expanded. */
8929 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8932 /* Here to do an ordinary binary operator, generating an instruction
8933 from the optab already placed in `this_optab'. */
8935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8938 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8940 temp = expand_binop (mode, this_optab, op0, op1, target,
8941 unsignedp, OPTAB_LIB_WIDEN);
8947 /* Similar to expand_expr, except that we don't specify a target, target
8948 mode, or modifier and we return the alignment of the inner type. This is
8949 used in cases where it is not necessary to align the result to the
8950 alignment of its type as long as we know the alignment of the result, for
8951 example for comparisons of BLKmode values. */
8954 expand_expr_unaligned (exp, palign)
8956 unsigned int *palign;
8959 tree type = TREE_TYPE (exp);
8960 enum machine_mode mode = TYPE_MODE (type);
8962 /* Default the alignment we return to that of the type. */
8963 *palign = TYPE_ALIGN (type);
8965 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8967 if (mode != BLKmode)
8968 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8970 switch (TREE_CODE (exp))
8974 case NON_LVALUE_EXPR:
8975 /* Conversions between BLKmode values don't change the underlying
8976 alignment or value. */
8977 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8978 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8982 /* Much of the code for this case is copied directly from expand_expr.
8983 We need to duplicate it here because we will do something different
8984 in the fall-through case, so we need to handle the same exceptions it does. */
8987 tree array = TREE_OPERAND (exp, 0);
8988 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8989 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8990 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8993 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8996 /* Optimize the special case of a zero lower bound.
8998 We convert the low_bound to sizetype to avoid some problems
8999 with constant folding. (E.g. suppose the lower bound is 1,
9000 and its mode is QI. Without the conversion, (ARRAY
9001 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9002 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9004 if (! integer_zerop (low_bound))
9005 index = size_diffop (index, convert (sizetype, low_bound));
9007 /* If this is a constant index into a constant array,
9008 just get the value from the array. Handle both the cases when
9009 we have an explicit constructor and when our operand is a variable
9010 that was declared const. */
9012 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
9013 && host_integerp (index, 0)
9014 && 0 > compare_tree_int (index,
9015 list_length (CONSTRUCTOR_ELTS
9016 (TREE_OPERAND (exp, 0)))))
9020 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
9021 i = tree_low_cst (index, 0);
9022 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
9026 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
9029 else if (optimize >= 1
9030 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9031 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9032 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
9034 if (TREE_CODE (index) == INTEGER_CST)
9036 tree init = DECL_INITIAL (array);
9038 if (TREE_CODE (init) == CONSTRUCTOR)
9042 for (elem = CONSTRUCTOR_ELTS (init);
9043 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
9044 elem = TREE_CHAIN (elem))
9048 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9058 case ARRAY_RANGE_REF:
9059 /* If the operand is a CONSTRUCTOR, we can just extract the
9060 appropriate field if it is present. Don't do this if we have
9061 already written the data since we want to refer to that copy
9062 and varasm.c assumes that's what we'll do. */
9063 if (TREE_CODE (exp) == COMPONENT_REF
9064 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9065 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9069 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9070 elt = TREE_CHAIN (elt))
9071 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9072 /* Note that unlike the case in expand_expr, we know this is
9073 BLKmode and hence not an integer. */
9074 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9078 enum machine_mode mode1;
9079 HOST_WIDE_INT bitsize, bitpos;
9082 unsigned int alignment;
9084 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9085 &mode1, &unsignedp, &volatilep,
9088 /* If we got back the original object, something is wrong. Perhaps
9089 we are evaluating an expression too early. In any event, don't
9090 infinitely recurse. */
9094 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9096 /* If this is a constant, put it into a register if it is a
9097 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
9098 if (CONSTANT_P (op0))
9100 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9102 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9104 op0 = force_reg (inner_mode, op0);
9106 op0 = validize_mem (force_const_mem (inner_mode, op0));
9111 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9113 /* If this object is in a register, put it into memory.
9114 This case can't occur in C, but can in Ada if we have
9115 unchecked conversion of an expression from a scalar type to
9116 an array or record type. */
9117 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9118 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9120 tree nt = build_qualified_type (TREE_TYPE (tem),
9121 (TYPE_QUALS (TREE_TYPE (tem))
9122 | TYPE_QUAL_CONST));
9123 rtx memloc = assign_temp (nt, 1, 1, 1);
9125 mark_temp_addr_taken (memloc);
9126 emit_move_insn (memloc, op0);
9130 if (GET_CODE (op0) != MEM)
9133 if (GET_MODE (offset_rtx) != ptr_mode)
9134 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9136 #ifdef POINTERS_EXTEND_UNSIGNED
9137 if (GET_MODE (offset_rtx) != Pmode)
9138 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9141 op0 = offset_address (op0, offset_rtx,
9142 highest_pow2_factor (offset));
9145 /* Don't forget about volatility even if this is a bitfield. */
9146 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9148 op0 = copy_rtx (op0);
9149 MEM_VOLATILE_P (op0) = 1;
9152 /* Check the access. */
9153 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9158 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9159 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9161 /* Check the access right of the pointer. */
9162 in_check_memory_usage = 1;
9163 if (size > BITS_PER_UNIT)
9164 emit_library_call (chkr_check_addr_libfunc,
9165 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9166 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9167 TYPE_MODE (sizetype),
9168 GEN_INT (MEMORY_USE_RO),
9169 TYPE_MODE (integer_type_node));
9170 in_check_memory_usage = 0;
9173 /* In cases where an aligned union has an unaligned object
9174 as a field, we might be extracting a BLKmode value from
9175 an integer-mode (e.g., SImode) object. Handle this case
9176 by doing the extract into an object as wide as the field
9177 (which we know to be the width of a basic mode), then
9178 storing into memory, and changing the mode to BLKmode.
9179 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9180 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9181 if (mode1 == VOIDmode
9182 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9183 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9184 && (TYPE_ALIGN (type) > alignment
9185 || bitpos % TYPE_ALIGN (type) != 0)))
9187 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9189 if (ext_mode == BLKmode)
9191 /* In this case, BITPOS must start at a byte boundary. */
9192 if (GET_CODE (op0) != MEM
9193 || bitpos % BITS_PER_UNIT != 0)
9196 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9200 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9202 rtx new = assign_temp (nt, 0, 1, 1);
9204 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9205 unsignedp, NULL_RTX, ext_mode,
9206 ext_mode, alignment,
9207 int_size_in_bytes (TREE_TYPE (tem)));
9209 /* If the result is a record type and BITSIZE is narrower than
9210 the mode of OP0, an integral mode, and this is a big endian
9211 machine, we must put the field into the high-order bits. */
9212 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9213 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9214 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9215 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9216 size_int (GET_MODE_BITSIZE
9221 emit_move_insn (new, op0);
9222 op0 = copy_rtx (new);
9223 PUT_MODE (op0, BLKmode);
9227 /* Get a reference to just this component. */
9228 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9230 set_mem_attributes (op0, exp, 0);
9232 /* Adjust the alignment in case the bit position is not
9233 a multiple of the alignment of the inner object. */
9234 while (bitpos % alignment != 0)
9237 if (GET_CODE (XEXP (op0, 0)) == REG)
9238 mark_reg_pointer (XEXP (op0, 0), alignment);
9240 MEM_IN_STRUCT_P (op0) = 1;
9241 MEM_VOLATILE_P (op0) |= volatilep;
9243 *palign = alignment;
9252 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9255 /* Return the tree node if ARG corresponds to a string constant, or zero
9256 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9257 in bytes within the string that ARG is accessing. The type of the
9258 offset will be `sizetype'. */
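/* For example, given the address expression `"abc" + 2' -- a PLUS_EXPR
 of an ADDR_EXPR of a STRING_CST and the constant 2 -- we return the
 STRING_CST and set *PTR_OFFSET to a sizetype constant 2. */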
9261 string_constant (arg, ptr_offset)
9267 if (TREE_CODE (arg) == ADDR_EXPR
9268 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9270 *ptr_offset = size_zero_node;
9271 return TREE_OPERAND (arg, 0);
9273 else if (TREE_CODE (arg) == PLUS_EXPR)
9275 tree arg0 = TREE_OPERAND (arg, 0);
9276 tree arg1 = TREE_OPERAND (arg, 1);
9281 if (TREE_CODE (arg0) == ADDR_EXPR
9282 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9284 *ptr_offset = convert (sizetype, arg1);
9285 return TREE_OPERAND (arg0, 0);
9287 else if (TREE_CODE (arg1) == ADDR_EXPR
9288 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9290 *ptr_offset = convert (sizetype, arg0);
9291 return TREE_OPERAND (arg1, 0);
9298 /* Expand code for a post- or pre- increment or decrement
9299 and return the RTX for the result.
9300 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9303 expand_increment (exp, post, ignore)
9309 tree incremented = TREE_OPERAND (exp, 0);
9310 optab this_optab = add_optab;
9312 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9313 int op0_is_copy = 0;
9314 int single_insn = 0;
9315 /* 1 means we can't store into OP0 directly,
9316 because it is a subreg narrower than a word,
9317 and we don't dare clobber the rest of the word. */
9320 /* Stabilize any component ref that might need to be
9321 evaluated more than once below. */
9323 || TREE_CODE (incremented) == BIT_FIELD_REF
9324 || (TREE_CODE (incremented) == COMPONENT_REF
9325 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9326 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9327 incremented = stabilize_reference (incremented);
9328 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9329 ones into save exprs so that they don't accidentally get evaluated
9330 more than once by the code below. */
9331 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9332 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9333 incremented = save_expr (incremented);
9335 /* Compute the operands as RTX.
9336 Note whether OP0 is the actual lvalue or a copy of it:
9337 I believe it is a copy iff it is a register or subreg
9338 and insns were generated in computing it. */
9340 temp = get_last_insn ();
9341 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9343 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9344 in place but instead must do sign- or zero-extension during assignment,
9345 so we copy it into a new register and let the code below use it as a copy.
9348 Note that we can safely modify this SUBREG since it is known not to be
9349 shared (it was made by the expand_expr call above). */
9351 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9354 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9358 else if (GET_CODE (op0) == SUBREG
9359 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9361 /* We cannot increment this SUBREG in place. If we are
9362 post-incrementing, get a copy of the old value. Otherwise,
9363 just mark that we cannot increment in place. */
9365 op0 = copy_to_reg (op0);
9370 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9371 && temp != get_last_insn ());
9372 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9373 EXPAND_MEMORY_USE_BAD);
9375 /* Decide whether incrementing or decrementing. */
9376 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9377 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9378 this_optab = sub_optab;
9380 /* Convert decrement by a constant into a negative increment. */
9381 if (this_optab == sub_optab
9382 && GET_CODE (op1) == CONST_INT)
9384 op1 = GEN_INT (-INTVAL (op1));
9385 this_optab = add_optab;
9388 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9389 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9391 /* For a preincrement, see if we can do this with a single instruction. */
9394 icode = (int) this_optab->handlers[(int) mode].insn_code;
9395 if (icode != (int) CODE_FOR_nothing
9396 /* Make sure that OP0 is valid for operands 0 and 1
9397 of the insn we want to queue. */
9398 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9399 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9400 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9404 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9405 then we cannot just increment OP0. We must therefore contrive to
9406 increment the original value. Then, for postincrement, we can return
9407 OP0 since it is a copy of the old value. For preincrement, expand here
9408 unless we can do it with a single insn.
9410 Likewise if storing directly into OP0 would clobber high bits
9411 we need to preserve (bad_subreg). */
9412 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9414 /* This is the easiest way to increment the value wherever it is.
9415 Problems with multiple evaluation of INCREMENTED are prevented
9416 because either (1) it is a component_ref or preincrement,
9417 in which case it was stabilized above, or (2) it is an array_ref
9418 with constant index in an array in a register, which is
9419 safe to reevaluate. */
9420 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9421 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9422 ? MINUS_EXPR : PLUS_EXPR),
9425 TREE_OPERAND (exp, 1));
9427 while (TREE_CODE (incremented) == NOP_EXPR
9428 || TREE_CODE (incremented) == CONVERT_EXPR)
9430 newexp = convert (TREE_TYPE (incremented), newexp);
9431 incremented = TREE_OPERAND (incremented, 0);
9434 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9435 return post ? op0 : temp;
9440 /* We have a true reference to the value in OP0.
9441 If there is an insn to add or subtract in this mode, queue it.
9442 Queueing the increment insn avoids the register shuffling
9443 that often results if we must increment now and first save
9444 the old value for subsequent use. */
9446 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9447 op0 = stabilize (op0);
9450 icode = (int) this_optab->handlers[(int) mode].insn_code;
9451 if (icode != (int) CODE_FOR_nothing
9452 /* Make sure that OP0 is valid for operands 0 and 1
9453 of the insn we want to queue. */
9454 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9455 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9457 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9458 op1 = force_reg (mode, op1);
9460 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9462 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9464 rtx addr = (general_operand (XEXP (op0, 0), mode)
9465 ? force_reg (Pmode, XEXP (op0, 0))
9466 : copy_to_reg (XEXP (op0, 0)));
9469 op0 = replace_equiv_address (op0, addr);
9470 temp = force_reg (GET_MODE (op0), op0);
9471 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9472 op1 = force_reg (mode, op1);
9474 /* The increment queue is LIFO, thus we have to `queue'
9475 the instructions in reverse order. */
9476 enqueue_insn (op0, gen_move_insn (op0, temp));
9477 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9482 /* Preincrement, or we can't increment with one simple insn. */
9484 /* Save a copy of the value before inc or dec, to return it later. */
9485 temp = value = copy_to_reg (op0);
9487 /* Arrange to return the incremented value. */
9488 /* Copy the rtx because expand_binop will protect from the queue,
9489 and the results of that would be invalid for us to return
9490 if our caller does emit_queue before using our result. */
9491 temp = copy_rtx (value = op0);
9493 /* Increment however we can. */
9494 op1 = expand_binop (mode, this_optab, value, op1,
9495 current_function_check_memory_usage ? NULL_RTX : op0,
9496 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9497 /* Make sure the value is stored into OP0. */
9499 emit_move_insn (op0, op1);
9504 /* At the start of a function, record that we have no previously-pushed
9505 arguments waiting to be popped. */
9508 init_pending_stack_adjust ()
9510 pending_stack_adjust = 0;
9513 /* When exiting from function, if safe, clear out any pending stack adjust
9514 so the adjustment won't get done.
9516 Note, if the current function calls alloca, then it must have a
9517 frame pointer regardless of the value of flag_omit_frame_pointer. */
9520 clear_pending_stack_adjust ()
9522 #ifdef EXIT_IGNORE_STACK
9524 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9525 && EXIT_IGNORE_STACK
9526 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9527 && ! flag_inline_functions)
9529 stack_pointer_delta -= pending_stack_adjust,
9530 pending_stack_adjust = 0;
9535 /* Pop any previously-pushed arguments that have not been popped yet. */
9538 do_pending_stack_adjust ()
9540 if (inhibit_defer_pop == 0)
9542 if (pending_stack_adjust != 0)
9543 adjust_stack (GEN_INT (pending_stack_adjust));
9544 pending_stack_adjust = 0;
9548 /* Expand conditional expressions. */
9550 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9551 LABEL is an rtx of code CODE_LABEL, in this function. */
9555 jumpifnot (exp, label)
9559 do_jump (exp, label, NULL_RTX);
9562 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9569 do_jump (exp, NULL_RTX, label);
9572 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9573 the result is zero, or IF_TRUE_LABEL if the result is one.
9574 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9575 meaning fall through in that case.
9577 do_jump always does any pending stack adjust except when it does not
9578 actually perform a jump. An example where there is no jump
9579 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9581 This function is responsible for optimizing cases such as
9582 &&, || and comparison operators in EXP. */
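/* For example, `if (a && b)' is expanded as a jump to IF_FALSE_LABEL
 as soon as A is known to be zero; no 0-or-1 value for `a && b' is
 ever materialized. */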
9585 do_jump (exp, if_false_label, if_true_label)
9587 rtx if_false_label, if_true_label;
9589 enum tree_code code = TREE_CODE (exp);
9590 /* Some cases need to create a label to jump to
9591 in order to properly fall through.
9592 These cases set DROP_THROUGH_LABEL nonzero. */
9593 rtx drop_through_label = 0;
9597 enum machine_mode mode;
9599 #ifdef MAX_INTEGER_COMPUTATION_MODE
9600 check_max_integer_computation_mode (exp);
9611 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9617 /* This is not true with #pragma weak */
9619 /* The address of something can never be zero. */
9621 emit_jump (if_true_label);
9626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9627 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9628 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9629 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9632 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9634 if ((TYPE_PRECISION (TREE_TYPE (exp))
9635 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9637 case NON_LVALUE_EXPR:
9638 case REFERENCE_EXPR:
9643 /* These cannot change zero->non-zero or vice versa. */
9644 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9647 case WITH_RECORD_EXPR:
9648 /* Put the object on the placeholder list, recurse through our first
9649 operand, and pop the list. */
9650 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9652 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9653 placeholder_list = TREE_CHAIN (placeholder_list);
9657 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9658 a test and can be longer if the test is eliminated. */
9660 /* Reduce to minus. */
9661 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9662 TREE_OPERAND (exp, 0),
9663 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9664 TREE_OPERAND (exp, 1))));
9665 /* Process as MINUS. */
9669 /* Non-zero iff operands of minus differ. */
9670 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9671 TREE_OPERAND (exp, 0),
9672 TREE_OPERAND (exp, 1)),
9673 NE, NE, if_false_label, if_true_label);
9677 /* If we are AND'ing with a small constant, do this comparison in the
9678 smallest type that fits. If the machine doesn't have comparisons
9679 that small, it will be converted back to the wider comparison.
9680 This helps if we are testing the sign bit of a narrower object.
9681 combine can't do this for us because it can't know whether a
9682 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
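/* For example, assuming the target has a QImode compare insn, a test
 of `x & 0x80' with X an SImode value can be done as a QImode
 sign-bit test: tree_floor_log2 (0x80) is 7, so an 8-bit mode suffices. */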
9684 if (! SLOW_BYTE_ACCESS
9685 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9686 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9687 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9688 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9689 && (type = type_for_mode (mode, 1)) != 0
9690 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9691 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9692 != CODE_FOR_nothing))
9694 do_jump (convert (type, exp), if_false_label, if_true_label);
9699 case TRUTH_NOT_EXPR:
9700 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9703 case TRUTH_ANDIF_EXPR:
9704 if (if_false_label == 0)
9705 if_false_label = drop_through_label = gen_label_rtx ();
9706 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9707 start_cleanup_deferral ();
9708 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9709 end_cleanup_deferral ();
9712 case TRUTH_ORIF_EXPR:
9713 if (if_true_label == 0)
9714 if_true_label = drop_through_label = gen_label_rtx ();
9715 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9716 start_cleanup_deferral ();
9717 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9718 end_cleanup_deferral ();
9723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9724 preserve_temp_slots (NULL_RTX);
9728 do_pending_stack_adjust ();
9729 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9735 case ARRAY_RANGE_REF:
9737 HOST_WIDE_INT bitsize, bitpos;
9739 enum machine_mode mode;
9743 unsigned int alignment;
9745 /* Get description of this reference. We don't actually care
9746 about the underlying object here. */
9747 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9748 &unsignedp, &volatilep, &alignment);
9750 type = type_for_size (bitsize, unsignedp);
9751 if (! SLOW_BYTE_ACCESS
9752 && type != 0 && bitsize >= 0
9753 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9754 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9755 != CODE_FOR_nothing))
9757 do_jump (convert (type, exp), if_false_label, if_true_label);
9764 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9765 if (integer_onep (TREE_OPERAND (exp, 1))
9766 && integer_zerop (TREE_OPERAND (exp, 2)))
9767 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9769 else if (integer_zerop (TREE_OPERAND (exp, 1))
9770 && integer_onep (TREE_OPERAND (exp, 2)))
9771 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9775 rtx label1 = gen_label_rtx ();
9776 drop_through_label = gen_label_rtx ();
9778 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9780 start_cleanup_deferral ();
9781 /* Now the THEN-expression. */
9782 do_jump (TREE_OPERAND (exp, 1),
9783 if_false_label ? if_false_label : drop_through_label,
9784 if_true_label ? if_true_label : drop_through_label);
9785 /* In case the do_jump just above never jumps. */
9786 do_pending_stack_adjust ();
9787 emit_label (label1);
9789 /* Now the ELSE-expression. */
9790 do_jump (TREE_OPERAND (exp, 2),
9791 if_false_label ? if_false_label : drop_through_label,
9792 if_true_label ? if_true_label : drop_through_label);
9793 end_cleanup_deferral ();
9799 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9801 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9802 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9804 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9805 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9808 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9809 fold (build (EQ_EXPR, TREE_TYPE (exp),
9810 fold (build1 (REALPART_EXPR,
9811 TREE_TYPE (inner_type),
9813 fold (build1 (REALPART_EXPR,
9814 TREE_TYPE (inner_type),
9816 fold (build (EQ_EXPR, TREE_TYPE (exp),
9817 fold (build1 (IMAGPART_EXPR,
9818 TREE_TYPE (inner_type),
9820 fold (build1 (IMAGPART_EXPR,
9821 TREE_TYPE (inner_type),
9823 if_false_label, if_true_label);
9826 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9827 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9829 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9830 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9831 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9833 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9839 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9841 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9842 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9844 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9845 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9848 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9849 fold (build (NE_EXPR, TREE_TYPE (exp),
9850 fold (build1 (REALPART_EXPR,
9851 TREE_TYPE (inner_type),
9853 fold (build1 (REALPART_EXPR,
9854 TREE_TYPE (inner_type),
9856 fold (build (NE_EXPR, TREE_TYPE (exp),
9857 fold (build1 (IMAGPART_EXPR,
9858 TREE_TYPE (inner_type),
9860 fold (build1 (IMAGPART_EXPR,
9861 TREE_TYPE (inner_type),
9863 if_false_label, if_true_label);
9866 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9867 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9869 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9870 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9871 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9873 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9878 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9879 if (GET_MODE_CLASS (mode) == MODE_INT
9880 && ! can_compare_p (LT, mode, ccp_jump))
9881 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9883 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9887 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9888 if (GET_MODE_CLASS (mode) == MODE_INT
9889 && ! can_compare_p (LE, mode, ccp_jump))
9890 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9892 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9896 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9897 if (GET_MODE_CLASS (mode) == MODE_INT
9898 && ! can_compare_p (GT, mode, ccp_jump))
9899 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9901 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9905 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9906 if (GET_MODE_CLASS (mode) == MODE_INT
9907 && ! can_compare_p (GE, mode, ccp_jump))
9908 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9910 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9913 case UNORDERED_EXPR:
9916 enum rtx_code cmp, rcmp;
9919 if (code == UNORDERED_EXPR)
9920 cmp = UNORDERED, rcmp = ORDERED;
9922 cmp = ORDERED, rcmp = UNORDERED;
9923 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9926 if (! can_compare_p (cmp, mode, ccp_jump)
9927 && (can_compare_p (rcmp, mode, ccp_jump)
9928 /* If the target doesn't provide either UNORDERED or ORDERED
9929 comparisons, canonicalize on UNORDERED for the library. */
9930 || rcmp == UNORDERED))
9934 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9936 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9941 enum rtx_code rcode1;
9942 enum tree_code tcode2;
9966 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9967 if (can_compare_p (rcode1, mode, ccp_jump))
9968 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9972 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9973 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9976 /* If the target doesn't support combined unordered
9977 compares, decompose into UNORDERED + comparison. */
9978 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9979 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9980 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9981 do_jump (exp, if_false_label, if_true_label);
9987 /* Special case: __builtin_expect (<test>, 0) and
9988 __builtin_expect (<test>, 1)
9990 We need to do this here, so that <test> is not converted to a SCC
9991 operation on machines that use condition code registers and COMPARE
9992 like the PowerPC, and then the jump is done based on whether the SCC
9993 operation produced a 1 or 0. */
9995 /* Check for a built-in function. */
9996 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9998 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9999 tree arglist = TREE_OPERAND (exp, 1);
10001 if (TREE_CODE (fndecl) == FUNCTION_DECL
10002 && DECL_BUILT_IN (fndecl)
10003 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10004 && arglist != NULL_TREE
10005 && TREE_CHAIN (arglist) != NULL_TREE)
10007 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10010 if (seq != NULL_RTX)
10017 /* fall through and generate the normal code. */
10021 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10023 /* This is not needed any more and causes poor code since it causes
10024 comparisons and tests from non-SI objects to have different code sequences. */
10026 /* Copy to register to avoid generating bad insns by cse
10027 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10028 if (!cse_not_expected && GET_CODE (temp) == MEM)
10029 temp = copy_to_reg (temp);
10031 do_pending_stack_adjust ();
10032 /* Do any postincrements in the expression that was tested. */
10035 if (GET_CODE (temp) == CONST_INT
10036 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10037 || GET_CODE (temp) == LABEL_REF)
10039 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10041 emit_jump (target);
10043 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10044 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10045 /* Note swapping the labels gives us not-equal. */
10046 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10047 else if (GET_MODE (temp) != VOIDmode)
10048 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10049 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10050 GET_MODE (temp), NULL_RTX, 0,
10051 if_false_label, if_true_label);
10056 if (drop_through_label)
10058 /* If do_jump produces code that might be jumped around,
10059 do any stack adjusts from that code, before the place
10060 where control merges in. */
10061 do_pending_stack_adjust ();
10062 emit_label (drop_through_label);
10066 /* Given a comparison expression EXP for values too wide to be compared
10067 with one insn, test the comparison and jump to the appropriate label.
10068 The code of EXP is ignored; we always test GT if SWAP is 0,
10069 and LT if SWAP is 1. */
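/* For example, do_jump expands a too-wide `a < b' by calling this with
 SWAP == 1, so that we test `b > a'; GE likewise passes SWAP == 1 but
 with the two labels exchanged. */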
10072 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10075 rtx if_false_label, if_true_label;
10077 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10078 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10079 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10080 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10082 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10085 /* Compare OP0 with OP1, word at a time, in mode MODE.
10086 UNSIGNEDP says to do unsigned comparison.
10087 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
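/* For a DImode comparison on a 32-bit target, this compares the high
 words first -- GT there decides true, NE (hence LT) decides false --
 and falls through to compare the low words, unsigned, only when the
 high words are equal. */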
10090 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10091 enum machine_mode mode;
10094 rtx if_false_label, if_true_label;
10096 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10097 rtx drop_through_label = 0;
10100 if (! if_true_label || ! if_false_label)
10101 drop_through_label = gen_label_rtx ();
10102 if (! if_true_label)
10103 if_true_label = drop_through_label;
10104 if (! if_false_label)
10105 if_false_label = drop_through_label;
10107 /* Compare a word at a time, high order first. */
10108 for (i = 0; i < nwords; i++)
10110 rtx op0_word, op1_word;
10112 if (WORDS_BIG_ENDIAN)
10114 op0_word = operand_subword_force (op0, i, mode);
10115 op1_word = operand_subword_force (op1, i, mode);
10119 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10120 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10123 /* All but high-order word must be compared as unsigned. */
10124 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10125 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10126 NULL_RTX, if_true_label);
10128 /* Consider lower words only if these are equal. */
10129 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10130 NULL_RTX, 0, NULL_RTX, if_false_label);
10133 if (if_false_label)
10134 emit_jump (if_false_label);
10135 if (drop_through_label)
10136 emit_label (drop_through_label);
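
/* A minimal illustration (kept under #if 0, so never compiled): for a
   hypothetical two-word signed operand this is the decision procedure
   the loop above emits -- only the highest word is compared signed,
   lower words are compared unsigned, and a lower word is consulted only
   when all higher words are equal.  */
#if 0
static int
example_wide_greater (long hi0, unsigned long lo0,
                      long hi1, unsigned long lo1)
{
  if (hi0 > hi1)                /* High word signed GT -> if_true_label.  */
    return 1;
  if (hi0 != hi1)               /* High word NE -> if_false_label.  */
    return 0;
  return lo0 > lo1;             /* Low word is always unsigned.  */
}
#endif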
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
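
/* Illustration (not compiled): the loop above jumps to IF_FALSE_LABEL as
   soon as any word pair differs; reaching the end of the loop means the
   values are equal.  A hypothetical two-word equivalent:  */
#if 0
static int
example_wide_equal (unsigned long hi0, unsigned long lo0,
                    unsigned long hi1, unsigned long lo1)
{
  if (hi0 != hi1)
    return 0;
  if (lo0 != lo1)
    return 0;
  return 1;                     /* Corresponds to emit_jump (if_true_label).  */
}
#endif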
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
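
/* Illustration (not compiled): the preferred strategy above IORs the
   words together so a single compare against zero suffices.  For a
   hypothetical two-word value:  */
#if 0
static int
example_wide_is_zero (unsigned long w0, unsigned long w1)
{
  return (w0 | w1) == 0;        /* One compare instead of one per word.  */
}
#endif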
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
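
/* Usage sketch (hypothetical caller, not compiled): emit the compare and
   inspect the result, which is either a folded constant or a
   (CODE (cc0) (const_int 0)) rtx suitable for a conditional branch.  */
#if 0
  rtx cond = compare_from_rtx (x, y, EQ, /* unsignedp */ 0,
                               SImode, NULL_RTX, /* align */ 0);
  if (GET_CODE (cond) == CONST_INT)
    /* Folded at compile time: const0_rtx means known false.  */
    ;
  else
    /* Branch on the cc0 comparison just emitted.  */
    ;
#endif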
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
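
/* Usage sketch (not compiled): either label may be NULL_RTX, meaning
   "fall through on that outcome".  For example, to jump only when two
   word_mode values differ, falling through when they are equal:  */
#if 0
  do_compare_rtx_and_jump (op0, op1, NE, /* unsignedp */ 1, word_mode,
                           NULL_RTX, 0, NULL_RTX, not_equal_label);
#endif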
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
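
/* Usage sketch (not compiled): do_jump dispatches comparison trees here
   with both rtx codes, and the signedness of the operand type selects
   one; e.g. for a LT_EXPR:  */
#if 0
  do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
#endif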
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
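
/* Illustration (not compiled) of the single-bit case above: a test like
   "(x & 8) != 0" becomes a shift and mask with no scc insn, and the EQ
   form merely xors the result with 1:  */
#if 0
  t = (x >> 3) & 1;             /* (x & 8) != 0  */
  t = ((x >> 3) & 1) ^ 1;       /* (x & 8) == 0  */
#endif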
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
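
/* Usage sketch (hypothetical operand values, not compiled): for a switch
   whose case labels span 3..10 the caller passes minval = 3 and
   range = 7, and a typical casesi pattern then performs, in effect:  */
#if 0
  if ((unsigned) (i - 3) > 7)
    goto default_label;
  goto *table[i - 3];           /* GNU C computed goto, for illustration.  */
#endif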
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
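
/* Illustration (not compiled): with a 4-byte CASE_VECTOR_MODE the address
   built above is (plus (mult index 4) (label_ref table)), i.e. roughly:  */
#if 0
  goto *table[index];           /* Load vector entry at table + index * 4.  */
#endif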
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
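
/* Usage sketch (not compiled; hedged -- the switch expander lives in
   stmt.c): the caller tries the casesi pattern first and falls back to
   an open-coded table jump:  */
#if 0
  if (! try_casesi (index_type, index_expr, minval, range,
                    table_label, default_label)
      && ! try_tablejump (index_type, index_expr, minval, range,
                          table_label, default_label))
    abort ();
#endif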