/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#define STACK_PUSH_CODE PRE_INC

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
int (*lang_safe_from_p) PARAMS ((rtx, tree));

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* Don't check memory usage, since code is being emitted to check
   memory usage.  Used when current_function_check_memory_usage is
   true, to avoid infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn	PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
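
/* Illustrative arithmetic, not from the original file: on a 32-bit
   target where word-aligned copies go SImode at a time, a 64-byte
   aligned block needs 16 move insns, so with the default MOVE_RATIO
   of 15 the test above fails and emit_block_move falls back to a
   movstr pattern or a library call; an 8-byte block (2 insns) is
   still moved by pieces.  */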
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    reg = gen_rtx_REG (mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

protect_from_queue (x, modify)

  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      register rtx y = XEXP (x, 0);
      register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

      MEM_COPY_ATTRIBUTES (new, x);

	  register rtx temp = gen_reg_rtx (GET_MODE (new));
	  emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      XEXP (new, 0) = copy_to_reg (XEXP (new, 0));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
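
/* Illustrative usage sketch, not part of the original file: the
   canonical calling pattern described above.  EXP and TARGET are
   hypothetical; any rtx that might be a QUEUED must be laundered
   immediately before being put in an insn, and the result must not
   be cached across an emit_queue.  */
#if 0
{
  rtx op = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  op = protect_from_queue (op, 0);	/* Read access; pass 1 to write.  */
  emit_insn (gen_move_insn (target, op));
  emit_queue ();			/* Queued increments run here; OP
					   must not be reused afterward.  */
}
#endif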
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)
     register rtx to, from;

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
	  emit_unop_insn (code, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);

	libcall = extendsfdf2_libfunc;
	libcall = extendsfxf2_libfunc;
	libcall = extendsftf2_libfunc;
	libcall = truncdfsf2_libfunc;
	libcall = extenddfxf2_libfunc;
	libcall = extenddftf2_libfunc;
	libcall = truncxfsf2_libfunc;
	libcall = truncxfdf2_libfunc;
	libcall = trunctfsf2_libfunc;
	libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */

  value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
  insns = get_insns ();
  emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
	fill_value = const0_rtx;

	  && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	  && STORE_FLAG_VALUE == -1)
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));

	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
	  from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
	  emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
	  from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
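
/* Illustrative usage sketch, not part of the original file: widen a
   QImode value into an SImode register with zero extension.  */
#if 0
{
  rtx byte = gen_reg_rtx (QImode);
  rtx word = gen_reg_rtx (SImode);

  convert_move (word, byte, 1);	/* UNSIGNEDP == 1: zero extend.  */
}
#endif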
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
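
/* Illustrative sketch, not part of the original file: the constant
   path above means no insns are emitted when narrowing a CONST_INT;
   the value is just truncated to the new mode.  */
#if 0
{
  rtx narrowed = convert_modes (QImode, SImode, GEN_INT (0x1234), 1);

  /* NARROWED is (const_int 52), i.e. 0x34, and no insns were
     emitted; only a register or memory source would need a move.  */
}
#endif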
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.

   When TO is NULL, the emit_single_push_insn is used to push the

   ALIGN is maximum alignment we can assume.  */

move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
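
/* Worked example (illustrative, not from the original file): on a
   32-bit target with MOVE_MAX == 4, move_by_pieces_ninsns (11, 32)
   below counts 11/4 = 2 SImode moves (3 bytes left), then 3/2 = 1
   HImode move, then 1 QImode move: 4 insns in all.  */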
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);

  while (data->len >= size)
	data->offset -= size;

      if (data->autinc_to)
	  to1 = gen_rtx_MEM (mode, data->to_addr);
	  MEM_COPY_ATTRIBUTES (to1, data->to);
	to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	  from1 = gen_rtx_MEM (mode, data->from_addr);
	  MEM_COPY_ATTRIBUTES (from1, data->from);
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

	emit_insn ((*genfun) (to1, from1));
	emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,

emit_block_move (x, y, size, align)

#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
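
/* Illustrative usage sketch, not part of the original file.  DEST and
   SRC are hypothetical BLKmode MEMs for two 16-byte, word-aligned
   objects; since SIZE is a CONST_INT small enough to satisfy
   MOVE_BY_PIECES_P, this copy is expanded as scalar moves.  */
#if 0
{
  emit_block_move (dest, src, GEN_INT (16), 32 /* bit alignment */);
}
#endif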
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
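
      /* For example (illustrative): with UNITS_PER_WORD == 4 and
	 SIZE == 3, the shift count is (4 - 3) * 8 == 8 bits, so the
	 three significant bytes end up left-justified in the word
	 before the store.  */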
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
emit_group_load (dst, orig_src, ssize, align)

  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	    src = gen_reg_rtx (GET_MODE (orig_src));
	  emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
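
/* Illustrative sketch, not part of the original file: a PARALLEL dst
   describing an 8-byte value split across two SImode hard regs, as an
   ABI might require.  Register numbers 4 and 5 and SRC_MEM are made
   up for the example.  */
#if 0
{
  rtvec v = gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (SImode, 4),
					  GEN_INT (0)),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (SImode, 5),
					  GEN_INT (4)));

  emit_group_load (gen_rtx_PARALLEL (VOIDmode, v), src_mem, 8, 32);
}
#endif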
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

emit_group_store (orig_dst, src, ssize, align)

  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
  else if (GET_CODE (dst) != MEM)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  if (BYTES_BIG_ENDIAN)
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	  bytelen = ssize - bytepos;

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)

  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
						  | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
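
  /* For instance (illustrative): with BITS_PER_WORD == 32 and a
     3-byte structure, the correction is 32 - 24 == 8, so extraction
     starts 8 bits into the first source word.  */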
  /* Copy the structure BITSIZE bits at a time.
     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2206 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2207 extract_bit_field (src, bitsize,
2208 xbitpos % BITS_PER_WORD, 1,
2209 NULL_RTX, word_mode, word_mode,
2210 bitsize, BITS_PER_WORD),
2211 bitsize, BITS_PER_WORD);
2217 /* Add a USE expression for REG to the (possibly empty) list pointed
2218 to by CALL_FUSAGE. REG must denote a hard register. */
2221 use_reg (call_fusage, reg)
2222 rtx *call_fusage, reg;
2224 if (GET_CODE (reg) != REG
2225 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2229 = gen_rtx_EXPR_LIST (VOIDmode,
2230 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2233 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2234 starting at REGNO. All of these registers must be hard registers. */
2237 use_regs (call_fusage, regno, nregs)
2244 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2247 for (i = 0; i < nregs; i++)
2248 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2251 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2252 PARALLEL REGS. This is for calls that pass values in multiple
2253 non-contiguous locations. The Irix 6 ABI has examples of this. */
2256 use_group_regs (call_fusage, regs)
2262 for (i = 0; i < XVECLEN (regs, 0); i++)
2264 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2266 /* A NULL entry means the parameter goes both on the stack and in
2267 registers. This can also be a MEM for targets that pass values
2268 partially on the stack and partially in registers. */
2269 if (reg != 0 && GET_CODE (reg) == REG)
2270 use_reg (call_fusage, reg);
2276 can_store_by_pieces (len, constfun, constfundata, align)
2277 unsigned HOST_WIDE_INT len;
2278 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2282 unsigned HOST_WIDE_INT max_size, l;
2283 HOST_WIDE_INT offset = 0;
2284 enum machine_mode mode, tmode;
2285 enum insn_code icode;
2289 if (! MOVE_BY_PIECES_P (len, align))
2292 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2293 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2294 align = MOVE_MAX * BITS_PER_UNIT;
2296 /* We would first store what we can in the largest integer mode, then go to
2297 successively smaller modes. */
2300 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2305 max_size = MOVE_MAX_PIECES + 1;
2306 while (max_size > 1)
2308 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2309 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2310 if (GET_MODE_SIZE (tmode) < max_size)
2313 if (mode == VOIDmode)
2316 icode = mov_optab->handlers[(int) mode].insn_code;
2317 if (icode != CODE_FOR_nothing
2318 && align >= GET_MODE_ALIGNMENT (mode))
2320 unsigned int size = GET_MODE_SIZE (mode);
2327 cst = (*constfun) (constfundata, offset, mode);
2328 if (!LEGITIMATE_CONSTANT_P (cst))
2338 max_size = GET_MODE_SIZE (mode);
2341 /* The code above should have handled everything. */
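/* Editor's sketch (hypothetical, not part of the original file): a
   CONSTFUN usable with can_store_by_pieces/store_by_pieces that yields
   successive MODE-sized pieces of a host string.  It assumes a reader
   such as builtins.c's c_readstr is visible from here; the real string
   expanders pass equivalent callbacks.  */

static rtx
example_read_str (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  /* Pack the next GET_MODE_SIZE (mode) bytes of the string into an
     rtx constant in target byte order.  */
  return c_readstr ((const char *) data + offset, mode);
}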
2349 /* Generate several move instructions to store LEN bytes generated by
2350 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2351 pointer which will be passed as argument in every CONSTFUN call.
2352 ALIGN is maximum alignment we can assume. */
2355 store_by_pieces (to, len, constfun, constfundata, align)
2357 unsigned HOST_WIDE_INT len;
2358 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2362 struct store_by_pieces data;
2364 if (! MOVE_BY_PIECES_P (len, align))
2366 to = protect_from_queue (to, 1);
2367 data.constfun = constfun;
2368 data.constfundata = constfundata;
2371 store_by_pieces_1 (&data, align);
2374 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2375 rtx with BLKmode). The caller must pass TO through protect_from_queue
2376 before calling. ALIGN is maximum alignment we can assume. */
2379 clear_by_pieces (to, len, align)
2381 unsigned HOST_WIDE_INT len;
2384 struct store_by_pieces data;
2386 data.constfun = clear_by_pieces_1;
2387 data.constfundata = NULL;
2390 store_by_pieces_1 (&data, align);
2393 /* Callback routine for clear_by_pieces.
2394 Return const0_rtx unconditionally. */
2397 clear_by_pieces_1 (data, offset, mode)
2398 PTR data ATTRIBUTE_UNUSED;
2399 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2400 enum machine_mode mode ATTRIBUTE_UNUSED;
2405 /* Subroutine of clear_by_pieces and store_by_pieces.
2406 Generate several move instructions to store LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). The caller must pass TO through protect_from_queue
2408 before calling. ALIGN is maximum alignment we can assume. */
2411 store_by_pieces_1 (data, align)
2412 struct store_by_pieces *data;
2415 rtx to_addr = XEXP (data->to, 0);
2416 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2417 enum machine_mode mode = VOIDmode, tmode;
2418 enum insn_code icode;
2421 data->to_addr = to_addr;
2423 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2424 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2426 data->explicit_inc_to = 0;
2428 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2430 data->offset = data->len;
2432 /* If storing requires more than two move insns,
2433 copy addresses to registers (to make displacements shorter)
2434 and use post-increment if available. */
2435 if (!data->autinc_to
2436 && move_by_pieces_ninsns (data->len, align) > 2)
2438 /* Determine the main mode we'll be using. */
2439 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2440 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2441 if (GET_MODE_SIZE (tmode) < max_size)
2444 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2446 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2447 data->autinc_to = 1;
2448 data->explicit_inc_to = -1;
2451 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2452 && ! data->autinc_to)
2454 data->to_addr = copy_addr_to_reg (to_addr);
2455 data->autinc_to = 1;
2456 data->explicit_inc_to = 1;
2459 if (!data->autinc_to && CONSTANT_P (to_addr))
2460 data->to_addr = copy_addr_to_reg (to_addr);
2463 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2464 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2465 align = MOVE_MAX * BITS_PER_UNIT;
2467 /* First store what we can in the largest integer mode, then go to
2468 successively smaller modes. */
2470 while (max_size > 1)
2472 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2473 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2474 if (GET_MODE_SIZE (tmode) < max_size)
2477 if (mode == VOIDmode)
2480 icode = mov_optab->handlers[(int) mode].insn_code;
2481 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2482 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2484 max_size = GET_MODE_SIZE (mode);
2487 /* The code above should have handled everything. */
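/* Editor's worked example (not in the original source): with
   MOVE_MAX_PIECES == 8, sufficient alignment and data->len == 11, the
   loop above stores one DImode piece (8 bytes, leaving 3), skips SImode
   (3 < 4), then stores one HImode piece and one QImode piece:
   8 + 2 + 1 == 11 bytes in three move insns.  */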
2492 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2493 with move instructions for mode MODE. GENFUN is the gen_... function
2494 to make a move insn for that mode. DATA has all the other info. */
2497 store_by_pieces_2 (genfun, mode, data)
2498 rtx (*genfun) PARAMS ((rtx, ...));
2499 enum machine_mode mode;
2500 struct store_by_pieces *data;
2502 unsigned int size = GET_MODE_SIZE (mode);
2505 while (data->len >= size)
2508 data->offset -= size;
2510 if (data->autinc_to)
2512 to1 = gen_rtx_MEM (mode, data->to_addr);
2513 MEM_COPY_ATTRIBUTES (to1, data->to);
2516 to1 = adjust_address (data->to, mode, data->offset);
2518 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2519 emit_insn (gen_add2_insn (data->to_addr,
2520 GEN_INT (-(HOST_WIDE_INT) size)));
2522 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2523 emit_insn ((*genfun) (to1, cst));
2525 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2526 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2528 if (! data->reverse)
2529 data->offset += size;
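/* Editor's note (illustrative): when explicit_inc_to > 0, each
   iteration above follows the store with
   (set (reg to_addr) (plus (reg to_addr) (const_int size)))
   whereas a genuine {PRE,POST}_{INC,DEC} address folds the update into
   the MEM and needs no separate add.  */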
2535 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2536 its length in bytes and ALIGN is the maximum alignment we can assume.
2538 If we call a function that returns the length of the block, return it. */
2541 clear_storage (object, size, align)
2546 #ifdef TARGET_MEM_FUNCTIONS
2548 tree call_expr, arg_list;
2552 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2553 just move a zero. Otherwise, do this a piece at a time. */
2554 if (GET_MODE (object) != BLKmode
2555 && GET_CODE (size) == CONST_INT
2556 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2557 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2560 object = protect_from_queue (object, 1);
2561 size = protect_from_queue (size, 0);
2563 if (GET_CODE (size) == CONST_INT
2564 && MOVE_BY_PIECES_P (INTVAL (size), align))
2565 clear_by_pieces (object, INTVAL (size), align);
2568 /* Try the most limited insn first, because there's no point
2569 including more than one in the machine description unless
2570 the more limited one has some advantage. */
2572 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2573 enum machine_mode mode;
2575 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2576 mode = GET_MODE_WIDER_MODE (mode))
2578 enum insn_code code = clrstr_optab[(int) mode];
2579 insn_operand_predicate_fn pred;
2581 if (code != CODE_FOR_nothing
2582 /* We don't need MODE to be narrower than
2583 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2584 the mode mask, as it is returned by the macro, it will
2585 definitely be less than the actual mode mask. */
2586 && ((GET_CODE (size) == CONST_INT
2587 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2588 <= (GET_MODE_MASK (mode) >> 1)))
2589 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2590 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2591 || (*pred) (object, BLKmode))
2592 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2593 || (*pred) (opalign, VOIDmode)))
2596 rtx last = get_last_insn ();
2599 op1 = convert_to_mode (mode, size, 1);
2600 pred = insn_data[(int) code].operand[1].predicate;
2601 if (pred != 0 && ! (*pred) (op1, mode))
2602 op1 = copy_to_mode_reg (mode, op1);
2604 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 delete_insns_since (last);
2615 /* OBJECT or SIZE may have been passed through protect_from_queue.
2617 It is unsafe to save the value generated by protect_from_queue
2618 and reuse it later. Consider what happens if emit_queue is
2619 called before the return value from protect_from_queue is used.
2621 Expansion of the CALL_EXPR below will call emit_queue before
2622 we are finished emitting RTL for argument setup. So if we are
2623 not careful we could get the wrong value for an argument.
2625 To avoid this problem we go ahead and emit code to copy OBJECT
2626 and SIZE into new pseudos. We can then place those new pseudos
2627 into an RTL_EXPR and use them later, even after a call to
2630 Note this is not strictly needed for library calls since they
2631 do not call emit_queue before loading their arguments. However,
2632 we may need to have library calls call emit_queue in the future
2633 since failing to do so could cause problems for targets which
2634 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2635 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2637 #ifdef TARGET_MEM_FUNCTIONS
2638 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2640 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2641 TREE_UNSIGNED (integer_type_node));
2642 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2645 #ifdef TARGET_MEM_FUNCTIONS
2646 /* It is incorrect to use the libcall calling conventions to call
2647 memset in this context.
2649 This could be a user call to memset and the user may wish to
2650 examine the return value from memset.
2652 For targets where libcalls and normal calls have different
2653 conventions for returning pointers, we could end up generating
2656 So instead of using a libcall sequence we build up a suitable
2657 CALL_EXPR and expand the call in the normal fashion. */
2658 if (fn == NULL_TREE)
2662 /* This was copied from except.c; I don't know whether all of it
2663 is necessary in this context.  */
2664 fn = get_identifier ("memset");
2665 fntype = build_pointer_type (void_type_node);
2666 fntype = build_function_type (fntype, NULL_TREE);
2667 fn = build_decl (FUNCTION_DECL, fn, fntype);
2668 ggc_add_tree_root (&fn, 1);
2669 DECL_EXTERNAL (fn) = 1;
2670 TREE_PUBLIC (fn) = 1;
2671 DECL_ARTIFICIAL (fn) = 1;
2672 TREE_NOTHROW (fn) = 1;
2673 make_decl_rtl (fn, NULL);
2674 assemble_external (fn);
2677 /* We need to make an argument list for the function call.
2679 memset has three arguments: the first is a void * address, the
2680 second an integer with the initialization value, and the last a
2681 size_t byte count for the copy.
2683 = build_tree_list (NULL_TREE,
2684 make_tree (build_pointer_type (void_type_node),
2686 TREE_CHAIN (arg_list)
2687 = build_tree_list (NULL_TREE,
2688 make_tree (integer_type_node, const0_rtx));
2689 TREE_CHAIN (TREE_CHAIN (arg_list))
2690 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2691 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2693 /* Now we have to build up the CALL_EXPR itself. */
2694 call_expr = build1 (ADDR_EXPR,
2695 build_pointer_type (TREE_TYPE (fn)), fn);
2696 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2697 call_expr, arg_list, NULL_TREE);
2698 TREE_SIDE_EFFECTS (call_expr) = 1;
2700 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
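/* Editor's note: the CALL_EXPR expanded above is simply the C call
   (void) memset (object, 0, size);
   routed through the normal call machinery so that its return-value
   conventions match a user-visible memset, as explained above.  */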
2702 emit_library_call (bzero_libfunc, LCT_NORMAL,
2703 VOIDmode, 2, object, Pmode, size,
2704 TYPE_MODE (integer_type_node));
2712 /* Generate code to copy Y into X.
2713 Both Y and X must have the same mode, except that
2714 Y can be a constant with VOIDmode.
2715 This mode cannot be BLKmode; use emit_block_move for that.
2717 Return the last instruction emitted. */
2720 emit_move_insn (x, y)
2723 enum machine_mode mode = GET_MODE (x);
2724 rtx y_cst = NULL_RTX;
2727 x = protect_from_queue (x, 1);
2728 y = protect_from_queue (y, 0);
2730 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2733 /* Never force constant_p_rtx to memory. */
2734 if (GET_CODE (y) == CONSTANT_P_RTX)
2736 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2739 y = force_const_mem (mode, y);
2742 /* If X or Y are memory references, verify that their addresses are valid
2744 if (GET_CODE (x) == MEM
2745 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2746 && ! push_operand (x, GET_MODE (x)))
2748 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2749 x = change_address (x, VOIDmode, XEXP (x, 0));
2751 if (GET_CODE (y) == MEM
2752 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2754 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2755 y = change_address (y, VOIDmode, XEXP (y, 0));
2757 if (mode == BLKmode)
2760 last_insn = emit_move_insn_1 (x, y);
2762 if (y_cst && GET_CODE (x) == REG)
2763 REG_NOTES (last_insn)
2764 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2769 /* Low level part of emit_move_insn.
2770 Called just like emit_move_insn, but assumes X and Y
2771 are basically valid. */
2774 emit_move_insn_1 (x, y)
2777 enum machine_mode mode = GET_MODE (x);
2778 enum machine_mode submode;
2779 enum mode_class class = GET_MODE_CLASS (mode);
2782 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2785 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2787 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2789 /* Expand complex moves by moving real part and imag part, if possible. */
2790 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2791 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2793 (class == MODE_COMPLEX_INT
2794 ? MODE_INT : MODE_FLOAT),
2796 && (mov_optab->handlers[(int) submode].insn_code
2797 != CODE_FOR_nothing))
2799 /* Don't split destination if it is a stack push. */
2800 int stack = push_operand (x, GET_MODE (x));
2802 #ifdef PUSH_ROUNDING
2803 /* In case we output to the stack, but the size is smaller than the
2804 machine can push exactly, we need to use move instructions.
2806 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2809 int offset1, offset2;
2811 /* Do not use anti_adjust_stack, since we don't want to update
2812 stack_pointer_delta. */
2813 temp = expand_binop (Pmode,
2814 #ifdef STACK_GROWS_DOWNWARD
2821 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2825 if (temp != stack_pointer_rtx)
2826 emit_move_insn (stack_pointer_rtx, temp);
2827 #ifdef STACK_GROWS_DOWNWARD
2829 offset2 = GET_MODE_SIZE (submode);
2831 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2832 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2833 + GET_MODE_SIZE (submode));
2835 emit_move_insn (change_address (x, submode,
2836 gen_rtx_PLUS (Pmode,
2838 GEN_INT (offset1))),
2839 gen_realpart (submode, y));
2840 emit_move_insn (change_address (x, submode,
2841 gen_rtx_PLUS (Pmode,
2843 GEN_INT (offset2))),
2844 gen_imagpart (submode, y));
2848 /* If this is a stack, push the highpart first, so it
2849 will be in the argument order.
2851 In that case, change_address is used only to convert
2852 the mode, not to change the address. */
2855 /* Note that the real part always precedes the imag part in memory
2856 regardless of machine's endianness. */
2857 #ifdef STACK_GROWS_DOWNWARD
2858 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2859 (gen_rtx_MEM (submode, XEXP (x, 0)),
2860 gen_imagpart (submode, y)));
2861 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2862 (gen_rtx_MEM (submode, XEXP (x, 0)),
2863 gen_realpart (submode, y)));
2865 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2866 (gen_rtx_MEM (submode, XEXP (x, 0)),
2867 gen_realpart (submode, y)));
2868 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2869 (gen_rtx_MEM (submode, XEXP (x, 0)),
2870 gen_imagpart (submode, y)));
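/* Editor's note: both arms above keep the real part at the lower
   address: a downward-growing stack pushes the imaginary half first, an
   upward-growing stack the real half first, preserving the layout that
   gen_realpart and gen_imagpart assume.  */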
2875 rtx realpart_x, realpart_y;
2876 rtx imagpart_x, imagpart_y;
2878 /* If this is a complex value with each part being smaller than a
2879 word, the usual calling sequence will likely pack the pieces into
2880 a single register. Unfortunately, SUBREG of hard registers only
2881 deals in terms of words, so we have a problem converting input
2882 arguments to the CONCAT of two registers that is used elsewhere
2883 for complex values. If this is before reload, we can copy it into
2884 memory and reload. FIXME, we should see about using extract and
2885 insert on integer registers, but complex short and complex char
2886 variables should be rarely used. */
2887 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2888 && (reload_in_progress | reload_completed) == 0)
2890 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2891 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2893 if (packed_dest_p || packed_src_p)
2895 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2896 ? MODE_FLOAT : MODE_INT);
2898 enum machine_mode reg_mode
2899 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2901 if (reg_mode != BLKmode)
2903 rtx mem = assign_stack_temp (reg_mode,
2904 GET_MODE_SIZE (mode), 0);
2905 rtx cmem = adjust_address (mem, mode, 0);
2908 = N_("function using short complex types cannot be inline");
2912 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2913 emit_move_insn_1 (cmem, y);
2914 return emit_move_insn_1 (sreg, mem);
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2919 emit_move_insn_1 (mem, sreg);
2920 return emit_move_insn_1 (x, cmem);
2926 realpart_x = gen_realpart (submode, x);
2927 realpart_y = gen_realpart (submode, y);
2928 imagpart_x = gen_imagpart (submode, x);
2929 imagpart_y = gen_imagpart (submode, y);
2931 /* Show the output dies here. This is necessary for SUBREGs
2932 of pseudos since we cannot track their lifetimes correctly;
2933 hard regs shouldn't appear here except as return values.
2934 We never want to emit such a clobber after reload. */
2936 && ! (reload_in_progress || reload_completed)
2937 && (GET_CODE (realpart_x) == SUBREG
2938 || GET_CODE (imagpart_x) == SUBREG))
2940 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2943 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2944 (realpart_x, realpart_y));
2945 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2946 (imagpart_x, imagpart_y));
2949 return get_last_insn ();
2952 /* This will handle any multi-word mode that lacks a move_insn pattern.
2953 However, you will get better code if you define such patterns,
2954 even if they must turn into multiple assembler instructions. */
2955 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2961 #ifdef PUSH_ROUNDING
2963 /* If X is a push on the stack, do the push now and replace
2964 X with a reference to the stack pointer. */
2965 if (push_operand (x, GET_MODE (x)))
2967 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2968 x = change_address (x, VOIDmode, stack_pointer_rtx);
2972 /* If we are in reload, see if either operand is a MEM whose address
2973 is scheduled for replacement. */
2974 if (reload_in_progress && GET_CODE (x) == MEM
2975 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2977 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2979 MEM_COPY_ATTRIBUTES (new, x);
2982 if (reload_in_progress && GET_CODE (y) == MEM
2983 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2985 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2987 MEM_COPY_ATTRIBUTES (new, y);
2995 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2998 rtx xpart = operand_subword (x, i, 1, mode);
2999 rtx ypart = operand_subword (y, i, 1, mode);
3001 /* If we can't get a part of Y, put Y into memory if it is a
3002 constant. Otherwise, force it into a register. If we still
3003 can't get a part of Y, abort. */
3004 if (ypart == 0 && CONSTANT_P (y))
3006 y = force_const_mem (mode, y);
3007 ypart = operand_subword (y, i, 1, mode);
3009 else if (ypart == 0)
3010 ypart = operand_subword_force (y, i, mode);
3012 if (xpart == 0 || ypart == 0)
3015 need_clobber |= (GET_CODE (xpart) == SUBREG);
3017 last_insn = emit_move_insn (xpart, ypart);
3020 seq = gen_sequence ();
3023 /* Show the output dies here. This is necessary for SUBREGs
3024 of pseudos since we cannot track their lifetimes correctly;
3025 hard regs shouldn't appear here except as return values.
3026 We never want to emit such a clobber after reload. */
3028 && ! (reload_in_progress || reload_completed)
3029 && need_clobber != 0)
3031 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
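/* Editor's example: a DImode move on a 32-bit target with no movdi
   pattern reaches this loop and becomes two SImode subword moves
   (word 0, then word 1), collected into one sequence and preceded by
   the CLOBBER emitted above.  */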
3042 /* Pushing data onto the stack. */
3044 /* Push a block of length SIZE (perhaps variable)
3045 and return an rtx to address the beginning of the block.
3046 Note that it is not possible for the value returned to be a QUEUED.
3047 The value may be virtual_outgoing_args_rtx.
3049 EXTRA is the number of bytes of padding to push in addition to SIZE.
3050 BELOW nonzero means this padding comes at low addresses;
3051 otherwise, the padding comes at high addresses. */
3054 push_block (size, extra, below)
3060 size = convert_modes (Pmode, ptr_mode, size, 1);
3061 if (CONSTANT_P (size))
3062 anti_adjust_stack (plus_constant (size, extra));
3063 else if (GET_CODE (size) == REG && extra == 0)
3064 anti_adjust_stack (size);
3067 temp = copy_to_mode_reg (Pmode, size);
3069 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3070 temp, 0, OPTAB_LIB_WIDEN);
3071 anti_adjust_stack (temp);
3074 #ifndef STACK_GROWS_DOWNWARD
3075 #ifdef ARGS_GROW_DOWNWARD
3076 if (!ACCUMULATE_OUTGOING_ARGS)
3084 /* Return the lowest stack address when STACK or ARGS grow downward and
3085 we are not accumulating outgoing arguments (the c4x port uses such
3086 conventions).  */
3087 temp = virtual_outgoing_args_rtx;
3088 if (extra != 0 && below)
3089 temp = plus_constant (temp, extra);
3093 if (GET_CODE (size) == CONST_INT)
3094 temp = plus_constant (virtual_outgoing_args_rtx,
3095 -INTVAL (size) - (below ? 0 : extra));
3096 else if (extra != 0 && !below)
3097 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3098 negate_rtx (Pmode, plus_constant (size, extra)));
3100 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3101 negate_rtx (Pmode, size));
3104 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
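/* Editor's example (not in the original source): on a
   STACK_GROWS_DOWNWARD target, push_block (GEN_INT (16), 0, 0) adjusts
   the stack down by 16 bytes and returns an address 16 bytes below
   virtual_outgoing_args_rtx, i.e. the base of the freshly pushed
   block.  */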
3108 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3109 block of SIZE bytes. */
3112 get_push_address (size)
3117 if (STACK_PUSH_CODE == POST_DEC)
3118 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3119 else if (STACK_PUSH_CODE == POST_INC)
3120 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3122 temp = stack_pointer_rtx;
3124 return copy_to_reg (temp);
3127 /* Emit single push insn. */
3129 emit_single_push_insn (mode, x, type)
3131 enum machine_mode mode;
3134 #ifdef PUSH_ROUNDING
3136 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3139 if (GET_MODE_SIZE (mode) == rounded_size)
3140 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3143 #ifdef STACK_GROWS_DOWNWARD
3144 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3145 GEN_INT (-rounded_size));
3147 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3148 GEN_INT (rounded_size));
3150 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3153 dest = gen_rtx_MEM (mode, dest_addr);
3155 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 set_mem_attributes (dest, type, 1);
3160 /* Function incoming arguments may overlap with sibling call
3161 outgoing arguments and we cannot allow reordering of reads
3162 from function arguments with stores to outgoing arguments
3163 of sibling calls. */
3164 MEM_ALIAS_SET (dest) = 0;
3166 emit_move_insn (dest, x);
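/* Editor's note: with STACK_PUSH_CODE == PRE_DEC and a mode the target
   can push exactly, the store above amounts to
   (set (mem:M (pre_dec:P (reg sp))) x)
   while the PRE_MODIFY form built above covers sizes that PUSH_ROUNDING
   pads to something larger.  */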
3172 /* Generate code to push X onto the stack, assuming it has mode MODE and
3173 type TYPE.
3174 MODE is redundant except when X is a CONST_INT (since they don't
3175 carry mode info).
3176 SIZE is an rtx for the size of data to be copied (in bytes),
3177 needed only if X is BLKmode.
3179 ALIGN (in bits) is maximum alignment we can assume.
3181 If PARTIAL and REG are both nonzero, then copy that many of the first
3182 words of X into registers starting with REG, and push the rest of X.
3183 The amount of space pushed is decreased by PARTIAL words,
3184 rounded *down* to a multiple of PARM_BOUNDARY.
3185 REG must be a hard register in this case.
3186 If REG is zero but PARTIAL is not, take all other actions for an
3187 argument partially in registers, but do not actually load any
3188 registers.
3190 EXTRA is the amount in bytes of extra space to leave next to this arg.
3191 This is ignored if an argument block has already been allocated.
3193 On a machine that lacks real push insns, ARGS_ADDR is the address of
3194 the bottom of the argument block for this call. We use indexing off there
3195 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3196 argument block has not been preallocated.
3198 ARGS_SO_FAR is the size of args previously pushed for this call.
3200 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3201 for arguments passed in registers. If nonzero, it will be the number
3202 of bytes required. */
3205 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3206 args_addr, args_so_far, reg_parm_stack_space,
3209 enum machine_mode mode;
3218 int reg_parm_stack_space;
3222 enum direction stack_direction
3223 #ifdef STACK_GROWS_DOWNWARD
3229 /* Decide where to pad the argument: `downward' for below,
3230 `upward' for above, or `none' for don't pad it.
3231 Default is below for small data on big-endian machines; else above. */
3232 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3234 /* Invert direction if stack is post-update. */
3235 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3236 if (where_pad != none)
3237 where_pad = (where_pad == downward ? upward : downward);
3239 xinner = x = protect_from_queue (x, 0);
3241 if (mode == BLKmode)
3243 /* Copy a block into the stack, entirely or partially. */
3246 int used = partial * UNITS_PER_WORD;
3247 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3255 /* USED is now the # of bytes we need not copy to the stack
3256 because registers will take care of them. */
3259 xinner = adjust_address (xinner, BLKmode, used);
3261 /* If the partial register-part of the arg counts in its stack size,
3262 skip the part of stack space corresponding to the registers.
3263 Otherwise, start copying to the beginning of the stack space,
3264 by setting SKIP to 0. */
3265 skip = (reg_parm_stack_space == 0) ? 0 : used;
3267 #ifdef PUSH_ROUNDING
3268 /* Do it with several push insns if that doesn't take lots of insns
3269 and if there is no difficulty with push insns that skip bytes
3270 on the stack for alignment purposes. */
3273 && GET_CODE (size) == CONST_INT
3275 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3276 /* Here we avoid the case of a structure whose weak alignment
3277 forces many pushes of a small amount of data,
3278 and such small pushes do rounding that causes trouble. */
3279 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3280 || align >= BIGGEST_ALIGNMENT
3281 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3282 == (align / BITS_PER_UNIT)))
3283 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3285 /* Push padding now if padding above and stack grows down,
3286 or if padding below and stack grows up.
3287 But if space already allocated, this has already been done. */
3288 if (extra && args_addr == 0
3289 && where_pad != none && where_pad != stack_direction)
3290 anti_adjust_stack (GEN_INT (extra));
3292 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3294 if (current_function_check_memory_usage && ! in_check_memory_usage)
3298 in_check_memory_usage = 1;
3299 temp = get_push_address (INTVAL (size) - used);
3300 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3301 emit_library_call (chkr_copy_bitmap_libfunc,
3302 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3303 Pmode, XEXP (xinner, 0), Pmode,
3304 GEN_INT (INTVAL (size) - used),
3305 TYPE_MODE (sizetype));
3307 emit_library_call (chkr_set_right_libfunc,
3308 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3309 Pmode, GEN_INT (INTVAL (size) - used),
3310 TYPE_MODE (sizetype),
3311 GEN_INT (MEMORY_USE_RW),
3312 TYPE_MODE (integer_type_node));
3313 in_check_memory_usage = 0;
3317 #endif /* PUSH_ROUNDING */
3321 /* Otherwise make space on the stack and copy the data
3322 to the address of that space. */
3324 /* Deduct words put into registers from the size we must copy. */
3327 if (GET_CODE (size) == CONST_INT)
3328 size = GEN_INT (INTVAL (size) - used);
3330 size = expand_binop (GET_MODE (size), sub_optab, size,
3331 GEN_INT (used), NULL_RTX, 0,
3335 /* Get the address of the stack space.
3336 In this case, we do not deal with EXTRA separately.
3337 A single stack adjust will do. */
3340 temp = push_block (size, extra, where_pad == downward);
3343 else if (GET_CODE (args_so_far) == CONST_INT)
3344 temp = memory_address (BLKmode,
3345 plus_constant (args_addr,
3346 skip + INTVAL (args_so_far)));
3348 temp = memory_address (BLKmode,
3349 plus_constant (gen_rtx_PLUS (Pmode,
3353 if (current_function_check_memory_usage && ! in_check_memory_usage)
3355 in_check_memory_usage = 1;
3356 target = copy_to_reg (temp);
3357 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3358 emit_library_call (chkr_copy_bitmap_libfunc,
3359 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3361 XEXP (xinner, 0), Pmode,
3362 size, TYPE_MODE (sizetype));
3364 emit_library_call (chkr_set_right_libfunc,
3365 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3367 size, TYPE_MODE (sizetype),
3368 GEN_INT (MEMORY_USE_RW),
3369 TYPE_MODE (integer_type_node));
3370 in_check_memory_usage = 0;
3373 target = gen_rtx_MEM (BLKmode, temp);
3377 set_mem_attributes (target, type, 1);
3378 /* Function incoming arguments may overlap with sibling call
3379 outgoing arguments and we cannot allow reordering of reads
3380 from function arguments with stores to outgoing arguments
3381 of sibling calls. */
3382 MEM_ALIAS_SET (target) = 0;
3385 /* TEMP is the address of the block. Copy the data there. */
3386 if (GET_CODE (size) == CONST_INT
3387 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3389 move_by_pieces (target, xinner, INTVAL (size), align);
3394 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3395 enum machine_mode mode;
3397 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3399 mode = GET_MODE_WIDER_MODE (mode))
3401 enum insn_code code = movstr_optab[(int) mode];
3402 insn_operand_predicate_fn pred;
3404 if (code != CODE_FOR_nothing
3405 && ((GET_CODE (size) == CONST_INT
3406 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3407 <= (GET_MODE_MASK (mode) >> 1)))
3408 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3409 && (!(pred = insn_data[(int) code].operand[0].predicate)
3410 || ((*pred) (target, BLKmode)))
3411 && (!(pred = insn_data[(int) code].operand[1].predicate)
3412 || ((*pred) (xinner, BLKmode)))
3413 && (!(pred = insn_data[(int) code].operand[3].predicate)
3414 || ((*pred) (opalign, VOIDmode))))
3416 rtx op2 = convert_to_mode (mode, size, 1);
3417 rtx last = get_last_insn ();
3420 pred = insn_data[(int) code].operand[2].predicate;
3421 if (pred != 0 && ! (*pred) (op2, mode))
3422 op2 = copy_to_mode_reg (mode, op2);
3424 pat = GEN_FCN ((int) code) (target, xinner,
3432 delete_insns_since (last);
3437 if (!ACCUMULATE_OUTGOING_ARGS)
3439 /* If the source is referenced relative to the stack pointer,
3440 copy it to another register to stabilize it. We do not need
3441 to do this if we know that we won't be changing sp. */
3443 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3444 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3445 temp = copy_to_reg (temp);
3448 /* Make inhibit_defer_pop nonzero around the library call
3449 to force it to pop the bcopy-arguments right away. */
3451 #ifdef TARGET_MEM_FUNCTIONS
3452 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3453 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3454 convert_to_mode (TYPE_MODE (sizetype),
3455 size, TREE_UNSIGNED (sizetype)),
3456 TYPE_MODE (sizetype));
3458 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3459 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3460 convert_to_mode (TYPE_MODE (integer_type_node),
3462 TREE_UNSIGNED (integer_type_node)),
3463 TYPE_MODE (integer_type_node));
3468 else if (partial > 0)
3470 /* Scalar partly in registers. */
3472 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3475 /* # words of start of argument
3476 that we must make space for but need not store. */
3477 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3478 int args_offset = INTVAL (args_so_far);
3481 /* Push padding now if padding above and stack grows down,
3482 or if padding below and stack grows up.
3483 But if space already allocated, this has already been done. */
3484 if (extra && args_addr == 0
3485 && where_pad != none && where_pad != stack_direction)
3486 anti_adjust_stack (GEN_INT (extra));
3488 /* If we make space by pushing it, we might as well push
3489 the real data. Otherwise, we can leave OFFSET nonzero
3490 and leave the space uninitialized. */
3494 /* Now NOT_STACK gets the number of words that we don't need to
3495 allocate on the stack. */
3496 not_stack = partial - offset;
3498 /* If the partial register-part of the arg counts in its stack size,
3499 skip the part of stack space corresponding to the registers.
3500 Otherwise, start copying to the beginning of the stack space,
3501 by setting SKIP to 0. */
3502 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3504 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3505 x = validize_mem (force_const_mem (mode, x));
3507 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3508 SUBREGs of such registers are not allowed. */
3509 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3510 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3511 x = copy_to_reg (x);
3513 /* Loop over all the words allocated on the stack for this arg. */
3514 /* We can do it by words, because any scalar bigger than a word
3515 has a size a multiple of a word. */
3516 #ifndef PUSH_ARGS_REVERSED
3517 for (i = not_stack; i < size; i++)
3519 for (i = size - 1; i >= not_stack; i--)
3521 if (i >= not_stack + offset)
3522 emit_push_insn (operand_subword_force (x, i, mode),
3523 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3525 GEN_INT (args_offset + ((i - not_stack + skip)
3527 reg_parm_stack_space, alignment_pad);
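/* Editor's worked example: with size == 4 words, partial == 3 and
   offset == 1, not_stack == 2, so words 2 and 3 get stack slots here
   but only word 3 (i >= not_stack + offset) is actually stored; the
   three register words are loaded at the bottom of this function.  */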
3532 rtx target = NULL_RTX;
3535 /* Push padding now if padding above and stack grows down,
3536 or if padding below and stack grows up.
3537 But if space already allocated, this has already been done. */
3538 if (extra && args_addr == 0
3539 && where_pad != none && where_pad != stack_direction)
3540 anti_adjust_stack (GEN_INT (extra));
3542 #ifdef PUSH_ROUNDING
3543 if (args_addr == 0 && PUSH_ARGS)
3544 emit_single_push_insn (mode, x, type);
3548 if (GET_CODE (args_so_far) == CONST_INT)
3550 = memory_address (mode,
3551 plus_constant (args_addr,
3552 INTVAL (args_so_far)));
3554 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3557 dest = gen_rtx_MEM (mode, addr);
3560 set_mem_attributes (dest, type, 1);
3561 /* Function incoming arguments may overlap with sibling call
3562 outgoing arguments and we cannot allow reordering of reads
3563 from function arguments with stores to outgoing arguments
3564 of sibling calls. */
3565 MEM_ALIAS_SET (dest) = 0;
3568 emit_move_insn (dest, x);
3572 if (current_function_check_memory_usage && ! in_check_memory_usage)
3574 in_check_memory_usage = 1;
3576 target = get_push_address (GET_MODE_SIZE (mode));
3578 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3579 emit_library_call (chkr_copy_bitmap_libfunc,
3580 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3581 Pmode, XEXP (x, 0), Pmode,
3582 GEN_INT (GET_MODE_SIZE (mode)),
3583 TYPE_MODE (sizetype));
3585 emit_library_call (chkr_set_right_libfunc,
3586 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3587 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3588 TYPE_MODE (sizetype),
3589 GEN_INT (MEMORY_USE_RW),
3590 TYPE_MODE (integer_type_node));
3591 in_check_memory_usage = 0;
3596 /* If part should go in registers, copy that part
3597 into the appropriate registers. Do this now, at the end,
3598 since mem-to-mem copies above may do function calls. */
3599 if (partial > 0 && reg != 0)
3601 /* Handle calls that pass values in multiple non-contiguous locations.
3602 The Irix 6 ABI has examples of this. */
3603 if (GET_CODE (reg) == PARALLEL)
3604 emit_group_load (reg, x, -1, align); /* ??? size? */
3606 move_block_to_reg (REGNO (reg), x, partial, mode);
3609 if (extra && args_addr == 0 && where_pad == stack_direction)
3610 anti_adjust_stack (GEN_INT (extra));
3612 if (alignment_pad && args_addr == 0)
3613 anti_adjust_stack (alignment_pad);
3616 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3617 operations.  */
3624 /* Only registers can be subtargets. */
3625 || GET_CODE (x) != REG
3626 /* If the register is readonly, it can't be set more than once. */
3627 || RTX_UNCHANGING_P (x)
3628 /* Don't use hard regs to avoid extending their life. */
3629 || REGNO (x) < FIRST_PSEUDO_REGISTER
3630 /* Avoid subtargets inside loops,
3631 since they hide some invariant expressions. */
3632 || preserve_subexpressions_p ())
3636 /* Expand an assignment that stores the value of FROM into TO.
3637 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3638 (This may contain a QUEUED rtx;
3639 if the value is constant, this rtx is a constant.)
3640 Otherwise, the returned value is NULL_RTX.
3642 SUGGEST_REG is no longer actually used.
3643 It used to mean, copy the value through a register
3644 and return that register, if that is possible.
3645 We now use WANT_VALUE to decide whether to do this. */
3648 expand_assignment (to, from, want_value, suggest_reg)
3651 int suggest_reg ATTRIBUTE_UNUSED;
3653 register rtx to_rtx = 0;
3656 /* Don't crash if the lhs of the assignment was erroneous. */
3658 if (TREE_CODE (to) == ERROR_MARK)
3660 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3661 return want_value ? result : NULL_RTX;
3664 /* Assignment of a structure component needs special treatment
3665 if the structure component's rtx is not simply a MEM.
3666 Assignment of an array element at a constant index, and assignment of
3667 an array element in an unaligned packed structure field, have the same
3668 problem.  */
3670 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3671 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3673 enum machine_mode mode1;
3674 HOST_WIDE_INT bitsize, bitpos;
3679 unsigned int alignment;
3682 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3683 &unsignedp, &volatilep, &alignment);
3685 /* If we are going to use store_bit_field and extract_bit_field,
3686 make sure to_rtx will be safe for multiple use. */
3688 if (mode1 == VOIDmode && want_value)
3689 tem = stabilize_reference (tem);
3691 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3694 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3696 if (GET_CODE (to_rtx) != MEM)
3699 if (GET_MODE (offset_rtx) != ptr_mode)
3701 #ifdef POINTERS_EXTEND_UNSIGNED
3702 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3704 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3708 /* A constant address in TO_RTX can have VOIDmode; we must not try
3709 to call force_reg for that case.  Avoid that case.  */
3710 if (GET_CODE (to_rtx) == MEM
3711 && GET_MODE (to_rtx) == BLKmode
3712 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3714 && (bitpos % bitsize) == 0
3715 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3716 && alignment == GET_MODE_ALIGNMENT (mode1))
3719 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3721 if (GET_CODE (XEXP (temp, 0)) == REG)
3724 to_rtx = change_address (to_rtx, mode1,
3725 force_reg (GET_MODE (XEXP (temp, 0)),
3730 to_rtx = change_address (to_rtx, VOIDmode,
3731 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3732 force_reg (ptr_mode,
3738 if (GET_CODE (to_rtx) == MEM)
3740 /* When the offset is zero, to_rtx is the address of the
3741 structure we are storing into, and hence may be shared.
3742 We must make a new MEM before setting the volatile bit. */
3744 to_rtx = copy_rtx (to_rtx);
3746 MEM_VOLATILE_P (to_rtx) = 1;
3748 #if 0 /* This was turned off because, when a field is volatile
3749 in an object which is not volatile, the object may be in a register,
3750 and then we would abort over here. */
3756 if (TREE_CODE (to) == COMPONENT_REF
3757 && TREE_READONLY (TREE_OPERAND (to, 1)))
3760 to_rtx = copy_rtx (to_rtx);
3762 RTX_UNCHANGING_P (to_rtx) = 1;
3765 /* Check the access. */
3766 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3771 enum machine_mode best_mode;
3773 best_mode = get_best_mode (bitsize, bitpos,
3774 TYPE_ALIGN (TREE_TYPE (tem)),
3776 if (best_mode == VOIDmode)
3779 best_mode_size = GET_MODE_BITSIZE (best_mode);
3780 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3781 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3782 size *= GET_MODE_SIZE (best_mode);
3784 /* Check the access right of the pointer. */
3785 in_check_memory_usage = 1;
3787 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3788 VOIDmode, 3, to_addr, Pmode,
3789 GEN_INT (size), TYPE_MODE (sizetype),
3790 GEN_INT (MEMORY_USE_WO),
3791 TYPE_MODE (integer_type_node));
3792 in_check_memory_usage = 0;
3795 /* If this is a varying-length object, we must get the address of
3796 the source and do an explicit block move. */
3799 unsigned int from_align;
3800 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3802 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3804 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3805 MIN (alignment, from_align));
3812 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3814 /* Spurious cast for HPUX compiler. */
3815 ? ((enum machine_mode)
3816 TYPE_MODE (TREE_TYPE (to)))
3820 int_size_in_bytes (TREE_TYPE (tem)),
3821 get_alias_set (to));
3823 preserve_temp_slots (result);
3827 /* If the value is meaningful, convert RESULT to the proper mode.
3828 Otherwise, return nothing. */
3829 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3830 TYPE_MODE (TREE_TYPE (from)),
3832 TREE_UNSIGNED (TREE_TYPE (to)))
3837 /* If the rhs is a function call and its value is not an aggregate,
3838 call the function before we start to compute the lhs.
3839 This is needed for correct code for cases such as
3840 val = setjmp (buf) on machines where reference to val
3841 requires loading up part of an address in a separate insn.
3843 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3844 since it might be a promoted variable where the zero- or sign- extension
3845 needs to be done. Handling this in the normal way is safe because no
3846 computation is done before the call. */
3847 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3848 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3849 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3850 && GET_CODE (DECL_RTL (to)) == REG))
3855 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3857 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3859 /* Handle calls that return values in multiple non-contiguous locations.
3860 The Irix 6 ABI has examples of this. */
3861 if (GET_CODE (to_rtx) == PARALLEL)
3862 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3863 TYPE_ALIGN (TREE_TYPE (from)));
3864 else if (GET_MODE (to_rtx) == BLKmode)
3865 emit_block_move (to_rtx, value, expr_size (from),
3866 TYPE_ALIGN (TREE_TYPE (from)));
3869 #ifdef POINTERS_EXTEND_UNSIGNED
3870 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3871 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3872 value = convert_memory_address (GET_MODE (to_rtx), value);
3874 emit_move_insn (to_rtx, value);
3876 preserve_temp_slots (to_rtx);
3879 return want_value ? to_rtx : NULL_RTX;
3882 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3883 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3888 if (GET_CODE (to_rtx) == MEM)
3889 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3892 /* Don't move directly into a return register. */
3893 if (TREE_CODE (to) == RESULT_DECL
3894 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3899 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3901 if (GET_CODE (to_rtx) == PARALLEL)
3902 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3903 TYPE_ALIGN (TREE_TYPE (from)));
3905 emit_move_insn (to_rtx, temp);
3907 preserve_temp_slots (to_rtx);
3910 return want_value ? to_rtx : NULL_RTX;
3913 /* In case we are returning the contents of an object which overlaps
3914 the place the value is being stored, use a safe function when copying
3915 a value through a pointer into a structure value return block. */
3916 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3917 && current_function_returns_struct
3918 && !current_function_returns_pcc_struct)
3923 size = expr_size (from);
3924 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3925 EXPAND_MEMORY_USE_DONT);
3927 /* Copy the rights of the bitmap. */
3928 if (current_function_check_memory_usage)
3929 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3930 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3931 XEXP (from_rtx, 0), Pmode,
3932 convert_to_mode (TYPE_MODE (sizetype),
3933 size, TREE_UNSIGNED (sizetype)),
3934 TYPE_MODE (sizetype));
3936 #ifdef TARGET_MEM_FUNCTIONS
3937 emit_library_call (memmove_libfunc, LCT_NORMAL,
3938 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3939 XEXP (from_rtx, 0), Pmode,
3940 convert_to_mode (TYPE_MODE (sizetype),
3941 size, TREE_UNSIGNED (sizetype)),
3942 TYPE_MODE (sizetype));
3944 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3945 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3946 XEXP (to_rtx, 0), Pmode,
3947 convert_to_mode (TYPE_MODE (integer_type_node),
3948 size, TREE_UNSIGNED (integer_type_node)),
3949 TYPE_MODE (integer_type_node));
3952 preserve_temp_slots (to_rtx);
3955 return want_value ? to_rtx : NULL_RTX;
3958 /* Compute FROM and store the value in the rtx we got. */
3961 result = store_expr (from, to_rtx, want_value);
3962 preserve_temp_slots (result);
3965 return want_value ? result : NULL_RTX;
3968 /* Generate code for computing expression EXP,
3969 and storing the value into TARGET.
3970 TARGET may contain a QUEUED rtx.
3972 If WANT_VALUE is nonzero, return a copy of the value
3973 not in TARGET, so that we can be sure to use the proper
3974 value in a containing expression even if TARGET has something
3975 else stored in it. If possible, we copy the value through a pseudo
3976 and return that pseudo. Or, if the value is constant, we try to
3977 return the constant. In some cases, we return a pseudo
3978 copied *from* TARGET.
3980 If the mode is BLKmode then we may return TARGET itself.
3981 It turns out that in BLKmode it doesn't cause a problem,
3982 because C has no operators that could combine two different
3983 assignments into the same BLKmode object with different values
3984 with no sequence point.  Will other languages need this to
3985 be more thorough?
3987 If WANT_VALUE is 0, we return NULL, to make sure
3988 to catch quickly any cases where the caller uses the value
3989 and fails to set WANT_VALUE. */
3992 store_expr (exp, target, want_value)
3994 register rtx target;
3998 int dont_return_target = 0;
3999 int dont_store_target = 0;
4001 if (TREE_CODE (exp) == COMPOUND_EXPR)
4003 /* Perform first part of compound expression, then assign from second
4004 part.  */
4005 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4007 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4009 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4011 /* For conditional expression, get safe form of the target. Then
4012 test the condition, doing the appropriate assignment on either
4013 side. This avoids the creation of unnecessary temporaries.
4014 For non-BLKmode, it is more efficient not to do this. */
4016 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4019 target = protect_from_queue (target, 1);
4021 do_pending_stack_adjust ();
4023 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4024 start_cleanup_deferral ();
4025 store_expr (TREE_OPERAND (exp, 1), target, 0);
4026 end_cleanup_deferral ();
4028 emit_jump_insn (gen_jump (lab2));
4031 start_cleanup_deferral ();
4032 store_expr (TREE_OPERAND (exp, 2), target, 0);
4033 end_cleanup_deferral ();
4038 return want_value ? target : NULL_RTX;
4040 else if (queued_subexp_p (target))
4041 /* If target contains a postincrement, let's not risk
4042 using it as the place to generate the rhs. */
4044 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4046 /* Expand EXP into a new pseudo. */
4047 temp = gen_reg_rtx (GET_MODE (target));
4048 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4051 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4053 /* If target is volatile, ANSI requires accessing the value
4054 *from* the target, if it is accessed. So make that happen.
4055 In no case return the target itself. */
4056 if (! MEM_VOLATILE_P (target) && want_value)
4057 dont_return_target = 1;
4059 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4060 && GET_MODE (target) != BLKmode)
4061 /* If target is in memory and caller wants value in a register instead,
4062 arrange that. Pass TARGET as target for expand_expr so that,
4063 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4064 We know expand_expr will not use the target in that case.
4065 Don't do this if TARGET is volatile because we are supposed
4066 to write it and then read it. */
4068 temp = expand_expr (exp, target, GET_MODE (target), 0);
4069 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4071 /* If TEMP is already in the desired TARGET, only copy it from
4072 memory and don't store it there again. */
4074 || (rtx_equal_p (temp, target)
4075 && ! side_effects_p (temp) && ! side_effects_p (target)))
4076 dont_store_target = 1;
4077 temp = copy_to_reg (temp);
4079 dont_return_target = 1;
4081 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4082 /* If this is a scalar in a register that is stored in a wider mode
4083 than the declared mode, compute the result into its declared mode
4084 and then convert to the wider mode.  Our value is the computed
4085 expression.  */
4087 /* If we don't want a value, we can do the conversion inside EXP,
4088 which will often result in some optimizations. Do the conversion
4089 in two steps: first change the signedness, if needed, then
4090 the extend. But don't do this if the type of EXP is a subtype
4091 of something else since then the conversion might involve
4092 more than just converting modes. */
4093 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4094 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4096 if (TREE_UNSIGNED (TREE_TYPE (exp))
4097 != SUBREG_PROMOTED_UNSIGNED_P (target))
4100 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4104 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4105 SUBREG_PROMOTED_UNSIGNED_P (target)),
4109 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4111 /* If TEMP is a volatile MEM and we want a result value, make
4112 the access now so it gets done only once. Likewise if
4113 it contains TARGET. */
4114 if (GET_CODE (temp) == MEM && want_value
4115 && (MEM_VOLATILE_P (temp)
4116 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4117 temp = copy_to_reg (temp);
4119 /* If TEMP is a VOIDmode constant, use convert_modes to make
4120 sure that we properly convert it. */
4121 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4122 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4123 TYPE_MODE (TREE_TYPE (exp)), temp,
4124 SUBREG_PROMOTED_UNSIGNED_P (target));
4126 convert_move (SUBREG_REG (target), temp,
4127 SUBREG_PROMOTED_UNSIGNED_P (target));
4129 /* If we promoted a constant, change the mode back down to match
4130 target. Otherwise, the caller might get confused by a result whose
4131 mode is larger than expected. */
4133 if (want_value && GET_MODE (temp) != GET_MODE (target)
4134 && GET_MODE (temp) != VOIDmode)
4136 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4137 SUBREG_PROMOTED_VAR_P (temp) = 1;
4138 SUBREG_PROMOTED_UNSIGNED_P (temp)
4139 = SUBREG_PROMOTED_UNSIGNED_P (target);
4142 return want_value ? temp : NULL_RTX;
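/* Editor's example: assigning to a `short' variable that the expander
   keeps promoted in a wider pseudo takes the branch above: the value is
   moved into the inner register with convert_move, and any value
   returned is re-marked as a promoted SUBREG so the caller sees the
   declared mode.  */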
4146 temp = expand_expr (exp, target, GET_MODE (target), 0);
4147 /* Return TARGET if it's a specified hardware register.
4148 If TARGET is a volatile mem ref, either return TARGET
4149 or return a reg copied *from* TARGET; ANSI requires this.
4151 Otherwise, if TEMP is not TARGET, return TEMP
4152 if it is constant (for efficiency),
4153 or if we really want the correct value. */
4154 if (!(target && GET_CODE (target) == REG
4155 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4156 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4157 && ! rtx_equal_p (temp, target)
4158 && (CONSTANT_P (temp) || want_value))
4159 dont_return_target = 1;
4162 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4163 the same as that of TARGET, adjust the constant. This is needed, for
4164 example, in case it is a CONST_DOUBLE and we want only a word-sized
4165 value.  */
4166 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4167 && TREE_CODE (exp) != ERROR_MARK
4168 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4169 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4170 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4172 if (current_function_check_memory_usage
4173 && GET_CODE (target) == MEM
4174 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4176 in_check_memory_usage = 1;
4177 if (GET_CODE (temp) == MEM)
4178 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4179 VOIDmode, 3, XEXP (target, 0), Pmode,
4180 XEXP (temp, 0), Pmode,
4181 expr_size (exp), TYPE_MODE (sizetype));
4183 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4184 VOIDmode, 3, XEXP (target, 0), Pmode,
4185 expr_size (exp), TYPE_MODE (sizetype),
4186 GEN_INT (MEMORY_USE_WO),
4187 TYPE_MODE (integer_type_node));
4188 in_check_memory_usage = 0;
4191 /* If value was not generated in the target, store it there.
4192 Convert the value to TARGET's type first if necessary.  */
4193 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4194 one or both of them are volatile memory refs, we have to distinguish
4195 two cases:
4196 - expand_expr has used TARGET.  In this case, we must not generate
4197 another copy.  This can be detected by TARGET being equal according
4198 to == .
4199 - expand_expr has not used TARGET - that means that the source just
4200 happens to have the same RTX form. Since temp will have been created
4201 by expand_expr, it will compare unequal according to == .
4202 We must generate a copy in this case, to reach the correct number
4203 of volatile memory references. */
4205 if ((! rtx_equal_p (temp, target)
4206 || (temp != target && (side_effects_p (temp)
4207 || side_effects_p (target))))
4208 && TREE_CODE (exp) != ERROR_MARK
4209 && ! dont_store_target)
4211 target = protect_from_queue (target, 1);
4212 if (GET_MODE (temp) != GET_MODE (target)
4213 && GET_MODE (temp) != VOIDmode)
4215 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4216 if (dont_return_target)
4218 /* In this case, we will return TEMP,
4219 so make sure it has the proper mode.
4220 But don't forget to store the value into TARGET. */
4221 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4222 emit_move_insn (target, temp);
4225 convert_move (target, temp, unsignedp);
4228 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4230 /* Handle copying a string constant into an array.
4231 The string constant may be shorter than the array.
4232 So copy just the string's actual length, and clear the rest. */
4236 /* Get the size of the data type of the string,
4237 which is actually the size of the target. */
4238 size = expr_size (exp);
4239 if (GET_CODE (size) == CONST_INT
4240 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4241 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4244 /* Compute the size of the data to copy from the string. */
4246 = size_binop (MIN_EXPR,
4247 make_tree (sizetype, size),
4248 size_int (TREE_STRING_LENGTH (exp)));
4249 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4250 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4254 /* Copy that much. */
4255 emit_block_move (target, temp, copy_size_rtx,
4256 TYPE_ALIGN (TREE_TYPE (exp)));
4258 /* Figure out how much is left in TARGET that we have to clear.
4259 Do all calculations in ptr_mode. */
4261 addr = XEXP (target, 0);
4262 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4264 if (GET_CODE (copy_size_rtx) == CONST_INT)
4266 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4267 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4269 (unsigned int) (BITS_PER_UNIT
4270 * (INTVAL (copy_size_rtx)
4271 & - INTVAL (copy_size_rtx))));
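	      /* Added annotation, not in the original source:
		 INTVAL (copy_size_rtx) & -INTVAL (copy_size_rtx)
		 isolates the lowest set bit of the copy size, i.e. the
		 largest power of two dividing it.  E.g. for a copy of
		 12 == 0b1100 bytes, 12 & -12 == 4, so the address of
		 the residue is known to be 4-byte (32-bit) aligned.  */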
4275 addr = force_reg (ptr_mode, addr);
4276 addr = expand_binop (ptr_mode, add_optab, addr,
4277 copy_size_rtx, NULL_RTX, 0,
4280 size = expand_binop (ptr_mode, sub_optab, size,
4281 copy_size_rtx, NULL_RTX, 0,
4284 align = BITS_PER_UNIT;
4285 label = gen_label_rtx ();
4286 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4287 GET_MODE (size), 0, 0, label);
4289 align = MIN (align, expr_align (copy_size));
4291 if (size != const0_rtx)
4293 rtx dest = gen_rtx_MEM (BLKmode, addr);
4295 MEM_COPY_ATTRIBUTES (dest, target);
4297 /* Be sure we can write on ADDR. */
4298 in_check_memory_usage = 1;
4299 if (current_function_check_memory_usage)
4300 emit_library_call (chkr_check_addr_libfunc,
4301 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4303 size, TYPE_MODE (sizetype),
4304 GEN_INT (MEMORY_USE_WO),
4305 TYPE_MODE (integer_type_node));
4306 in_check_memory_usage = 0;
4307 clear_storage (dest, size, align);
4314 /* Handle calls that return values in multiple non-contiguous locations.
4315 The Irix 6 ABI has examples of this. */
4316 else if (GET_CODE (target) == PARALLEL)
4317 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4318 TYPE_ALIGN (TREE_TYPE (exp)));
4319 else if (GET_MODE (temp) == BLKmode)
4320 emit_block_move (target, temp, expr_size (exp),
4321 TYPE_ALIGN (TREE_TYPE (exp)));
4323 emit_move_insn (target, temp);
4326 /* If we don't want a value, return NULL_RTX. */
4330 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4331 ??? The latter test doesn't seem to make sense. */
4332 else if (dont_return_target && GET_CODE (temp) != MEM)
4335 /* Return TARGET itself if it is a hard register. */
4336 else if (want_value && GET_MODE (target) != BLKmode
4337 && ! (GET_CODE (target) == REG
4338 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4339 return copy_to_reg (target);
4345 /* Return 1 if EXP just contains zeros. */
4353 switch (TREE_CODE (exp))
4357 case NON_LVALUE_EXPR:
4358 return is_zeros_p (TREE_OPERAND (exp, 0));
4361 return integer_zerop (exp);
4365 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4368 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4371 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4372 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4373 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4374 if (! is_zeros_p (TREE_VALUE (elt)))
4384 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4387 mostly_zeros_p (exp)
4390 if (TREE_CODE (exp) == CONSTRUCTOR)
4392 int elts = 0, zeros = 0;
4393 tree elt = CONSTRUCTOR_ELTS (exp);
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4396 /* If there are no ranges of true bits, it is all zero. */
4397 return elt == NULL_TREE;
4399 for (; elt; elt = TREE_CHAIN (elt))
4401 /* We do not handle the case where the index is a RANGE_EXPR,
4402 so the statistic will be somewhat inaccurate.
4403 We do make a more accurate count in store_constructor itself,
4404 so since this function is only used for nested array elements,
4405 this should be close enough. */
4406 if (mostly_zeros_p (TREE_VALUE (elt)))
4411 return 4 * zeros >= 3 * elts;
4414 return is_zeros_p (exp);
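/* Added annotation, not in the original source: the 4 * zeros >= 3 * elts
   test above is the divisionless form of zeros / elts >= 3/4.  E.g. a
   constructor with 10 elements of which 8 are zero satisfies
   4 * 8 >= 3 * 10, so the caller will prefer to clear the whole object
   and store only the nonzero elements.  */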
4417 /* Helper function for store_constructor.
4418 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4419 TYPE is the type of the CONSTRUCTOR, not the element type.
4420 ALIGN and CLEARED are as for store_constructor.
4421 ALIAS_SET is the alias set to use for any stores.
4423 This provides a recursive shortcut back to store_constructor when it isn't
4424 necessary to go through store_field. This is so that we can pass through
4425 the cleared field to let store_constructor know that we may not have to
4426 clear a substructure if the outer structure has already been cleared. */
4429 store_constructor_field (target, bitsize, bitpos,
4430 mode, exp, type, align, cleared, alias_set)
4432 unsigned HOST_WIDE_INT bitsize;
4433 HOST_WIDE_INT bitpos;
4434 enum machine_mode mode;
4440 if (TREE_CODE (exp) == CONSTRUCTOR
4441 && bitpos % BITS_PER_UNIT == 0
4442 /* If we have a non-zero bitpos for a register target, then we just
4443 let store_field do the bitfield handling. This is unlikely to
4444 generate unnecessary clear instructions anyways. */
4445 && (bitpos == 0 || GET_CODE (target) == MEM))
4449 = adjust_address (target,
4450 GET_MODE (target) == BLKmode
4452 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4453 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4456 /* Show the alignment may no longer be what it was and update the alias
4457 set, if required. */
4459 align = MIN (align, (unsigned int) bitpos & - bitpos);
4460 if (GET_CODE (target) == MEM)
4461 MEM_ALIAS_SET (target) = alias_set;
4463 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4466 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4467 int_size_in_bytes (type), alias_set);
4470 /* Store the value of constructor EXP into the rtx TARGET.
4471 TARGET is either a REG or a MEM.
4472 ALIGN is the maximum known alignment for TARGET.
4473 CLEARED is true if TARGET is known to have been zero'd.
4474 SIZE is the number of bytes of TARGET we are allowed to modify: this
4475 may not be the same as the size of EXP if we are assigning to a field
4476 which has been packed to exclude padding bits. */
4479 store_constructor (exp, target, align, cleared, size)
4486 tree type = TREE_TYPE (exp);
4487 #ifdef WORD_REGISTER_OPERATIONS
4488 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4491 /* We know our target cannot conflict, since safe_from_p has been called. */
4493 /* Don't try copying piece by piece into a hard register
4494 since that is vulnerable to being clobbered by EXP.
4495 Instead, construct in a pseudo register and then copy it all. */
4496 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4498 rtx temp = gen_reg_rtx (GET_MODE (target));
4499 store_constructor (exp, temp, align, cleared, size);
4500 emit_move_insn (target, temp);
4505 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4506 || TREE_CODE (type) == QUAL_UNION_TYPE)
4510 /* Inform later passes that the whole union value is dead. */
4511 if ((TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4517 /* If the constructor is empty, clear the union. */
4518 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4519 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4522 /* If we are building a static constructor into a register,
4523 set the initial value as zero so we can fold the value into
4524 a constant. But if more than one register is involved,
4525 this probably loses. */
4526 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4527 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4530 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4535 /* If the constructor has fewer fields than the structure
4536 or if we are initializing the structure to mostly zeros,
4537 clear the whole structure first. Don't do this if TARGET is a
4538 register whose mode size isn't equal to SIZE since clear_storage
4539 can't handle this case. */
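      /* Added annotation, not in the original source: e.g. for

		struct { int a, b, c; } s = { 1 };

	 the constructor lists one field but the type has three, so the
	 whole struct is cleared first and only `a' is stored explicitly;
	 the required zero-initialization of `b' and `c' falls out of the
	 clear.  */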
4541 && ((list_length (CONSTRUCTOR_ELTS (exp))
4542 != fields_length (type))
4543 || mostly_zeros_p (exp))
4544 && (GET_CODE (target) != REG
4545 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4548 clear_storage (target, GEN_INT (size), align);
4553 /* Inform later passes that the old value is dead. */
4554 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4556 /* Store each element of the constructor into
4557 the corresponding field of TARGET. */
4559 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4561 register tree field = TREE_PURPOSE (elt);
4562 #ifdef WORD_REGISTER_OPERATIONS
4563 tree value = TREE_VALUE (elt);
4565 register enum machine_mode mode;
4566 HOST_WIDE_INT bitsize;
4567 HOST_WIDE_INT bitpos = 0;
4570 rtx to_rtx = target;
4572 /* Just ignore missing fields.
4573 We cleared the whole structure, above,
4574 if any fields are missing. */
4578 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4581 if (host_integerp (DECL_SIZE (field), 1))
4582 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4586 unsignedp = TREE_UNSIGNED (field);
4587 mode = DECL_MODE (field);
4588 if (DECL_BIT_FIELD (field))
4591 offset = DECL_FIELD_OFFSET (field);
4592 if (host_integerp (offset, 0)
4593 && host_integerp (bit_position (field), 0))
4595 bitpos = int_bit_position (field);
4599 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4605 if (contains_placeholder_p (offset))
4606 offset = build (WITH_RECORD_EXPR, sizetype,
4607 offset, make_tree (TREE_TYPE (exp), target));
4609 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4610 if (GET_CODE (to_rtx) != MEM)
4613 if (GET_MODE (offset_rtx) != ptr_mode)
4615 #ifdef POINTERS_EXTEND_UNSIGNED
4616 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4618 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4623 = change_address (to_rtx, VOIDmode,
4624 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4625 force_reg (ptr_mode,
4627 align = DECL_OFFSET_ALIGN (field);
4630 if (TREE_READONLY (field))
4632 if (GET_CODE (to_rtx) == MEM)
4633 to_rtx = copy_rtx (to_rtx);
4635 RTX_UNCHANGING_P (to_rtx) = 1;
4638 #ifdef WORD_REGISTER_OPERATIONS
4639 /* If this initializes a field that is smaller than a word, at the
4640 start of a word, try to widen it to a full word.
4641 This special case allows us to output C++ member function
4642 initializations in a form that the optimizers can understand. */
4643 if (GET_CODE (target) == REG
4644 && bitsize < BITS_PER_WORD
4645 && bitpos % BITS_PER_WORD == 0
4646 && GET_MODE_CLASS (mode) == MODE_INT
4647 && TREE_CODE (value) == INTEGER_CST
4649 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4651 tree type = TREE_TYPE (value);
4652 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4654 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4655 value = convert (type, value);
4657 if (BYTES_BIG_ENDIAN)
4659 = fold (build (LSHIFT_EXPR, type, value,
4660 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4661 bitsize = BITS_PER_WORD;
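	  /* Added annotation, not in the original source: a sketch of
	     the widening above, assuming a 32-bit big-endian word and
	     an 8-bit INTEGER_CST 0x2a stored at bit position 0 of a
	     REG target: the value is converted to a 32-bit type,
	     shifted left by 32 - 8 == 24 to 0x2a000000, and BITSIZE
	     becomes a full word, so the store is a plain word move the
	     optimizers can track.  */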
4665 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4666 TREE_VALUE (elt), type, align, cleared,
4667 (DECL_NONADDRESSABLE_P (field)
4668 && GET_CODE (to_rtx) == MEM)
4669 ? MEM_ALIAS_SET (to_rtx)
4670 : get_alias_set (TREE_TYPE (field)));
4673 else if (TREE_CODE (type) == ARRAY_TYPE)
4678 tree domain = TYPE_DOMAIN (type);
4679 tree elttype = TREE_TYPE (type);
4680 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4681 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4682 HOST_WIDE_INT minelt;
4683 HOST_WIDE_INT maxelt;
4685 /* If we have constant bounds for the range of the type, get them. */
4688 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4689 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4692 /* If the constructor has fewer elements than the array,
4693 clear the whole array first. Similarly if this is
4694 a static constructor of a non-BLKmode object. */
4695 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4699 HOST_WIDE_INT count = 0, zero_count = 0;
4700 need_to_clear = ! const_bounds_p;
4702 /* This loop is a more accurate version of the loop in
4703 mostly_zeros_p (it handles RANGE_EXPR in an index).
4704 It is also needed to check for missing elements. */
4705 for (elt = CONSTRUCTOR_ELTS (exp);
4706 elt != NULL_TREE && ! need_to_clear;
4707 elt = TREE_CHAIN (elt))
4709 tree index = TREE_PURPOSE (elt);
4710 HOST_WIDE_INT this_node_count;
4712 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4714 tree lo_index = TREE_OPERAND (index, 0);
4715 tree hi_index = TREE_OPERAND (index, 1);
4717 if (! host_integerp (lo_index, 1)
4718 || ! host_integerp (hi_index, 1))
4724 this_node_count = (tree_low_cst (hi_index, 1)
4725 - tree_low_cst (lo_index, 1) + 1);
4728 this_node_count = 1;
4730 count += this_node_count;
4731 if (mostly_zeros_p (TREE_VALUE (elt)))
4732 zero_count += this_node_count;
4735 /* Clear the entire array first if there are any missing elements,
4736 or if the incidence of zero elements is >= 75%. */
4738 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4742 if (need_to_clear && size > 0)
4745 clear_storage (target, GEN_INT (size), align);
4749 /* Inform later passes that the old value is dead. */
4750 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4752 /* Store each element of the constructor into
4753 the corresponding element of TARGET, determined
4754 by counting the elements. */
4755 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4757 elt = TREE_CHAIN (elt), i++)
4759 register enum machine_mode mode;
4760 HOST_WIDE_INT bitsize;
4761 HOST_WIDE_INT bitpos;
4763 tree value = TREE_VALUE (elt);
4764 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4765 tree index = TREE_PURPOSE (elt);
4766 rtx xtarget = target;
4768 if (cleared && is_zeros_p (value))
4771 unsignedp = TREE_UNSIGNED (elttype);
4772 mode = TYPE_MODE (elttype);
4773 if (mode == BLKmode)
4774 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4775 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4778 bitsize = GET_MODE_BITSIZE (mode);
4780 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4782 tree lo_index = TREE_OPERAND (index, 0);
4783 tree hi_index = TREE_OPERAND (index, 1);
4784 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4785 struct nesting *loop;
4786 HOST_WIDE_INT lo, hi, count;
4789 /* If the range is constant and "small", unroll the loop. */
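	      /* Added annotation, not in the original source: a
		 RANGE_EXPR index comes from a GNU C designated-range
		 initializer such as

			int v[16] = { [2 ... 5] = 7 };

		 With constant bounds like these and a small total
		 size, the code below emits the four element stores
		 directly instead of a run-time loop.  */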
4791 && host_integerp (lo_index, 0)
4792 && host_integerp (hi_index, 0)
4793 && (lo = tree_low_cst (lo_index, 0),
4794 hi = tree_low_cst (hi_index, 0),
4795 count = hi - lo + 1,
4796 (GET_CODE (target) != MEM
4798 || (host_integerp (TYPE_SIZE (elttype), 1)
4799 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4802 lo -= minelt; hi -= minelt;
4803 for (; lo <= hi; lo++)
4805 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4806 store_constructor_field
4807 (target, bitsize, bitpos, mode, value, type, align,
4809 TYPE_NONALIASED_COMPONENT (type)
4810 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4815 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4816 loop_top = gen_label_rtx ();
4817 loop_end = gen_label_rtx ();
4819 unsignedp = TREE_UNSIGNED (domain);
4821 index = build_decl (VAR_DECL, NULL_TREE, domain);
4824 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4826 SET_DECL_RTL (index, index_r);
4827 if (TREE_CODE (value) == SAVE_EXPR
4828 && SAVE_EXPR_RTL (value) == 0)
4830 /* Make sure value gets expanded once before the
4832 expand_expr (value, const0_rtx, VOIDmode, 0);
4835 store_expr (lo_index, index_r, 0);
4836 loop = expand_start_loop (0);
4838 /* Assign value to element index. */
4840 = convert (ssizetype,
4841 fold (build (MINUS_EXPR, TREE_TYPE (index),
4842 index, TYPE_MIN_VALUE (domain))));
4843 position = size_binop (MULT_EXPR, position,
4845 TYPE_SIZE_UNIT (elttype)));
4847 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4848 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4849 xtarget = change_address (target, mode, addr);
4850 if (TREE_CODE (value) == CONSTRUCTOR)
4851 store_constructor (value, xtarget, align, cleared,
4852 bitsize / BITS_PER_UNIT);
4854 store_expr (value, xtarget, 0);
4856 expand_exit_loop_if_false (loop,
4857 build (LT_EXPR, integer_type_node,
4860 expand_increment (build (PREINCREMENT_EXPR,
4862 index, integer_one_node), 0, 0);
4864 emit_label (loop_end);
4867 else if ((index != 0 && ! host_integerp (index, 0))
4868 || ! host_integerp (TYPE_SIZE (elttype), 1))
4874 index = ssize_int (1);
4877 index = convert (ssizetype,
4878 fold (build (MINUS_EXPR, index,
4879 TYPE_MIN_VALUE (domain))));
4881 position = size_binop (MULT_EXPR, index,
4883 TYPE_SIZE_UNIT (elttype)));
4884 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4885 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4886 xtarget = change_address (target, mode, addr);
4887 store_expr (value, xtarget, 0);
4892 bitpos = ((tree_low_cst (index, 0) - minelt)
4893 * tree_low_cst (TYPE_SIZE (elttype), 1));
4895 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4897 store_constructor_field (target, bitsize, bitpos, mode, value,
4898 type, align, cleared,
4899 TYPE_NONALIASED_COMPONENT (type)
4900 && GET_CODE (target) == MEM
4901 ? MEM_ALIAS_SET (target) :
4902 get_alias_set (elttype));
4908 /* Set constructor assignments. */
4909 else if (TREE_CODE (type) == SET_TYPE)
4911 tree elt = CONSTRUCTOR_ELTS (exp);
4912 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4913 tree domain = TYPE_DOMAIN (type);
4914 tree domain_min, domain_max, bitlength;
4916 /* The default implementation strategy is to extract the constant
4917 parts of the constructor, use that to initialize the target,
4918 and then "or" in whatever non-constant ranges we need in addition.
4920 If a large set is all zero or all ones, it is
4921 probably better to set it using memset (if available) or bzero.
4922 Also, if a large set has just a single range, it may also be
4923 better to first clear the whole set (using
4924 bzero/memset), and then set the bits we want. */
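      /* Added annotation, not in the original source: SET_TYPE
	 constructors come from front ends with first-class set types
	 (e.g. Pascal-style sets, roughly one bit per possible member);
	 the target object is just a bit vector, so "initializing" it
	 means building constant words for the known members and OR-ing
	 in any run-time ranges.  */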
4926 /* Check for all zeros. */
4927 if (elt == NULL_TREE && size > 0)
4930 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4934 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4935 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4936 bitlength = size_binop (PLUS_EXPR,
4937 size_diffop (domain_max, domain_min),
4940 nbits = tree_low_cst (bitlength, 1);
4942 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4943 are "complicated" (more than one range), initialize (the
4944 constant parts) by copying from a constant. */
4945 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4946 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4948 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4949 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4950 char *bit_buffer = (char *) alloca (nbits);
4951 HOST_WIDE_INT word = 0;
4952 unsigned int bit_pos = 0;
4953 unsigned int ibit = 0;
4954 unsigned int offset = 0; /* In bytes from beginning of set. */
4956 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4959 if (bit_buffer[ibit])
4961 if (BYTES_BIG_ENDIAN)
4962 word |= (1 << (set_word_size - 1 - bit_pos));
4964 word |= 1 << bit_pos;
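	      /* Added annotation, not in the original source: with
		 set_word_size == 32, member bit 0 lands in bit 31 of
		 WORD on a big-endian target (1 << (32 - 1 - 0)) and in
		 bit 0 on a little-endian one; either way the set's
		 first member ends up in the lowest-addressed byte of
		 the word once it is stored.  */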
4968 if (bit_pos >= set_word_size || ibit == nbits)
4970 if (word != 0 || ! cleared)
4972 rtx datum = GEN_INT (word);
4975 /* The assumption here is that it is safe to use
4976 XEXP if the set is multi-word, but not if
4977 it's single-word. */
4978 if (GET_CODE (target) == MEM)
4979 to_rtx = adjust_address (target, mode, offset);
4980 else if (offset == 0)
4984 emit_move_insn (to_rtx, datum);
4991 offset += set_word_size / BITS_PER_UNIT;
4996 /* Don't bother clearing storage if the set is all ones. */
4997 if (TREE_CHAIN (elt) != NULL_TREE
4998 || (TREE_PURPOSE (elt) == NULL_TREE
5000 : ( ! host_integerp (TREE_VALUE (elt), 0)
5001 || ! host_integerp (TREE_PURPOSE (elt), 0)
5002 || (tree_low_cst (TREE_VALUE (elt), 0)
5003 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5004 != (HOST_WIDE_INT) nbits))))
5005 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5007 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5009 /* Start of range of element or NULL. */
5010 tree startbit = TREE_PURPOSE (elt);
5011 /* End of range of element, or element value. */
5012 tree endbit = TREE_VALUE (elt);
5013 #ifdef TARGET_MEM_FUNCTIONS
5014 HOST_WIDE_INT startb, endb;
5016 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5018 bitlength_rtx = expand_expr (bitlength,
5019 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5021 /* Handle non-range tuple element like [ expr ]. */
5022 if (startbit == NULL_TREE)
5024 startbit = save_expr (endbit);
5028 startbit = convert (sizetype, startbit);
5029 endbit = convert (sizetype, endbit);
5030 if (! integer_zerop (domain_min))
5032 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5033 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5035 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5036 EXPAND_CONST_ADDRESS);
5037 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5038 EXPAND_CONST_ADDRESS);
5044 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5047 emit_move_insn (targetx, target);
5050 else if (GET_CODE (target) == MEM)
5055 #ifdef TARGET_MEM_FUNCTIONS
5056 /* Optimization: If startbit and endbit are
5057 constants divisible by BITS_PER_UNIT,
5058 call memset instead. */
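	      /* Added annotation, not in the original source: e.g. a
		 range covering set bits 8..23 has startb == 8 and
		 endb == 24, both divisible by BITS_PER_UNIT == 8, so
		 it is exactly bytes 1 and 2 of the bit vector, and a
		 memset of (24 - 8) / 8 == 2 bytes with the all-ones
		 value -1 sets it.  */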
5059 if (TREE_CODE (startbit) == INTEGER_CST
5060 && TREE_CODE (endbit) == INTEGER_CST
5061 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5062 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5064 emit_library_call (memset_libfunc, LCT_NORMAL,
5066 plus_constant (XEXP (targetx, 0),
5067 startb / BITS_PER_UNIT),
5069 constm1_rtx, TYPE_MODE (integer_type_node),
5070 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5071 TYPE_MODE (sizetype));
5075 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5076 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5077 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5078 startbit_rtx, TYPE_MODE (sizetype),
5079 endbit_rtx, TYPE_MODE (sizetype));
5082 emit_move_insn (target, targetx);
5090 /* Store the value of EXP (an expression tree)
5091 into a subfield of TARGET which has mode MODE and occupies
5092 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5093 If MODE is VOIDmode, it means that we are storing into a bit-field.
5095 If VALUE_MODE is VOIDmode, return nothing in particular.
5096 UNSIGNEDP is not used in this case.
5098 Otherwise, return an rtx for the value stored. This rtx
5099 has mode VALUE_MODE if that is convenient to do.
5100 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5102 ALIGN is the alignment that TARGET is known to have.
5103 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5105 ALIAS_SET is the alias set for the destination. This value will
5106 (in general) be different from that for TARGET, since TARGET is a
5107 reference to the containing structure. */
5110 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5111 unsignedp, align, total_size, alias_set)
5113 HOST_WIDE_INT bitsize;
5114 HOST_WIDE_INT bitpos;
5115 enum machine_mode mode;
5117 enum machine_mode value_mode;
5120 HOST_WIDE_INT total_size;
5123 HOST_WIDE_INT width_mask = 0;
5125 if (TREE_CODE (exp) == ERROR_MARK)
5128 /* If we have nothing to store, do nothing unless the expression has
5131 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5133 if (bitsize < HOST_BITS_PER_WIDE_INT)
5134 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5136 /* If we are storing into an unaligned field of an aligned union that is
5137 in a register, we may have the mode of TARGET being an integer mode but
5138 MODE == BLKmode. In that case, get an aligned object whose size and
5139 alignment are the same as TARGET and store TARGET into it (we can avoid
5140 the store if the field being stored is the entire width of TARGET). Then
5141 call ourselves recursively to store the field into a BLKmode version of
5142 that object. Finally, load from the object into TARGET. This is not
5143 very efficient in general, but should only be slightly more expensive
5144 than the otherwise-required unaligned accesses. Perhaps this can be
5145 cleaned up later. */
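  /* Added annotation, not in the original source: concretely, if
     TARGET is an SImode pseudo holding a union and we must store a
     BLKmode field into part of it, we take a stack temporary OBJECT in
     the same SImode, move TARGET into it (unless the field covers all
     of TARGET), store into BLK_OBJECT -- the same slot viewed in
     BLKmode -- and finally move OBJECT back into TARGET.  */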
5148 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5152 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5155 rtx blk_object = copy_rtx (object);
5157 PUT_MODE (blk_object, BLKmode);
5159 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5160 emit_move_insn (object, target);
5162 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5163 align, total_size, alias_set);
5165 /* Even though we aren't returning target, we need to
5166 give it the updated value. */
5167 emit_move_insn (target, object);
5172 if (GET_CODE (target) == CONCAT)
5174 /* We're storing into a struct containing a single __complex. */
5178 return store_expr (exp, target, 0);
5181 /* If the structure is in a register or if the component
5182 is a bit field, we cannot use addressing to access it.
5183 Use bit-field techniques or SUBREG to store in it. */
5185 if (mode == VOIDmode
5186 || (mode != BLKmode && ! direct_store[(int) mode]
5187 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5188 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5189 || GET_CODE (target) == REG
5190 || GET_CODE (target) == SUBREG
5191 /* If the field isn't aligned enough to store as an ordinary memref,
5192 store it as a bit field. */
5193 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5194 && (align < GET_MODE_ALIGNMENT (mode)
5195 || bitpos % GET_MODE_ALIGNMENT (mode)))
5196 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5197 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5198 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5199 /* If the RHS and field are a constant size and the size of the
5200 RHS isn't the same size as the bitfield, we must use bitfield
5203 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5204 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5206 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5208 /* If BITSIZE is narrower than the size of the type of EXP
5209 we will be narrowing TEMP. Normally, what's wanted are the
5210 low-order bits. However, if EXP's type is a record and this is
5211 big-endian machine, we want the upper BITSIZE bits. */
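      /* Added annotation, not in the original source: e.g. narrowing a
	 record value held in a 32-bit TEMP to a 16-bit field on a
	 big-endian machine: the bytes we want are the upper ones, so
	 TEMP is shifted right by 32 - 16 == 16 before the store,
	 instead of keeping the low-order bits as on a little-endian
	 machine.  */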
5212 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5213 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5214 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5215 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5216 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5220 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5222 if (mode != VOIDmode && mode != BLKmode
5223 && mode != TYPE_MODE (TREE_TYPE (exp)))
5224 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5226 /* If the modes of TARGET and TEMP are both BLKmode, both
5227 must be in memory and BITPOS must be aligned on a byte
5228 boundary. If so, we simply do a block copy. */
5229 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5231 unsigned int exp_align = expr_align (exp);
5233 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5234 || bitpos % BITS_PER_UNIT != 0)
5237 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5239 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5240 align = MIN (exp_align, align);
5242 /* Find an alignment that is consistent with the bit position. */
5243 while ((bitpos % align) != 0)
5246 emit_block_move (target, temp,
5247 bitsize == -1 ? expr_size (exp)
5248 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5252 return value_mode == VOIDmode ? const0_rtx : target;
5255 /* Store the value in the bitfield. */
5256 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5257 if (value_mode != VOIDmode)
5259 /* The caller wants an rtx for the value. */
5260 /* If possible, avoid refetching from the bitfield itself. */
5262 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5265 enum machine_mode tmode;
5268 return expand_and (temp,
5272 GET_MODE (temp) == VOIDmode
5274 : GET_MODE (temp))), NULL_RTX);
5275 tmode = GET_MODE (temp);
5276 if (tmode == VOIDmode)
5278 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5279 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5280 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
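	  /* Added annotation, not in the original source: the shift
	     pair above is the classic sign extension of a BITSIZE-bit
	     field: for an 8-bit field in a 32-bit TMODE,
	     (x << 24) >> 24 with an arithmetic right shift replicates
	     bit 7 through bits 8..31, so 0xff becomes -1 rather than
	     255.  */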
5282 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5283 NULL_RTX, value_mode, 0, align,
5290 rtx addr = XEXP (target, 0);
5293 /* If a value is wanted, it must be the lhs;
5294 so make the address stable for multiple use. */
5296 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5297 && ! CONSTANT_ADDRESS_P (addr)
5298 /* A frame-pointer reference is already stable. */
5299 && ! (GET_CODE (addr) == PLUS
5300 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5301 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5302 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5303 addr = copy_to_reg (addr);
5305 /* Now build a reference to just the desired component. */
5307 to_rtx = copy_rtx (change_address (target, mode,
5308 plus_constant (addr,
5310 / BITS_PER_UNIT))));
5311 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5312 /* If the address of the structure varies, then it might be on
5313 the stack. And, stack slots may be shared across scopes.
5314 So, two different structures, of different types, can end up
5315 at the same location. We will give the structures alias set
5316 zero; here we must be careful not to give non-zero alias sets
5318 if (!rtx_varies_p (addr, /*for_alias=*/0))
5319 MEM_ALIAS_SET (to_rtx) = alias_set;
5321 MEM_ALIAS_SET (to_rtx) = 0;
5323 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5327 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5328 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5329 codes and find the ultimate containing object, which we return.
5331 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5332 bit position, and *PUNSIGNEDP to the signedness of the field.
5333 If the position of the field is variable, we store a tree
5334 giving the variable offset (in units) in *POFFSET.
5335 This offset is in addition to the bit position.
5336 If the position is not variable, we store 0 in *POFFSET.
5337 We set *PALIGNMENT to the alignment of the address that will be
5338 computed. This is the alignment of the thing we return if *POFFSET
5339 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5341 If any of the extraction expressions is volatile,
5342 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5344 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5345 is a mode that can be used to access the field. In that case, *PBITSIZE
5348 If the field describes a variable-sized object, *PMODE is set to
5349 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5350 this case, but the address of the object can be found. */
5353 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5354 punsignedp, pvolatilep, palignment)
5356 HOST_WIDE_INT *pbitsize;
5357 HOST_WIDE_INT *pbitpos;
5359 enum machine_mode *pmode;
5362 unsigned int *palignment;
5365 enum machine_mode mode = VOIDmode;
5366 tree offset = size_zero_node;
5367 tree bit_offset = bitsize_zero_node;
5368 unsigned int alignment = BIGGEST_ALIGNMENT;
5371 /* First get the mode, signedness, and size. We do this from just the
5372 outermost expression. */
5373 if (TREE_CODE (exp) == COMPONENT_REF)
5375 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5376 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5377 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5379 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5381 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5383 size_tree = TREE_OPERAND (exp, 1);
5384 *punsignedp = TREE_UNSIGNED (exp);
5388 mode = TYPE_MODE (TREE_TYPE (exp));
5389 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5391 if (mode == BLKmode)
5392 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5394 *pbitsize = GET_MODE_BITSIZE (mode);
5399 if (! host_integerp (size_tree, 1))
5400 mode = BLKmode, *pbitsize = -1;
5402 *pbitsize = tree_low_cst (size_tree, 1);
5405 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5406 and find the ultimate containing object. */
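/* Added annotation, not in the original source: e.g. for `s.f.g' where
   `f' sits at byte 4 of `s' and `g' is a bit-field at bit 3 of `f',
   the loop below peels the COMPONENT_REFs outermost-in, accumulating
   *PBITPOS == 4 * BITS_PER_UNIT + 3 == 35 (with *POFFSET == 0, since
   everything is constant), and returns `s' as the ultimate containing
   object.  */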
5409 if (TREE_CODE (exp) == BIT_FIELD_REF)
5410 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5411 else if (TREE_CODE (exp) == COMPONENT_REF)
5413 tree field = TREE_OPERAND (exp, 1);
5414 tree this_offset = DECL_FIELD_OFFSET (field);
5416 /* If this field hasn't been filled in yet, don't go
5417 past it. This should only happen when folding expressions
5418 made during type construction. */
5419 if (this_offset == 0)
5421 else if (! TREE_CONSTANT (this_offset)
5422 && contains_placeholder_p (this_offset))
5423 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5425 offset = size_binop (PLUS_EXPR, offset, this_offset);
5426 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5427 DECL_FIELD_BIT_OFFSET (field));
5429 if (! host_integerp (offset, 0))
5430 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5433 else if (TREE_CODE (exp) == ARRAY_REF
5434 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5436 tree index = TREE_OPERAND (exp, 1);
5437 tree array = TREE_OPERAND (exp, 0);
5438 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5439 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5440 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5442 /* We assume all arrays have sizes that are a multiple of a byte.
5443 First subtract the lower bound, if any, in the type of the
5444 index, then convert to sizetype and multiply by the size of the
5446 if (low_bound != 0 && ! integer_zerop (low_bound))
5447 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5450 /* If the index has a self-referential type, pass it to a
5451 WITH_RECORD_EXPR; if the component size does, pass our
5452 component to one. */
5453 if (! TREE_CONSTANT (index)
5454 && contains_placeholder_p (index))
5455 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5456 if (! TREE_CONSTANT (unit_size)
5457 && contains_placeholder_p (unit_size))
5458 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5460 offset = size_binop (PLUS_EXPR, offset,
5461 size_binop (MULT_EXPR,
5462 convert (sizetype, index),
5466 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5467 && ! ((TREE_CODE (exp) == NOP_EXPR
5468 || TREE_CODE (exp) == CONVERT_EXPR)
5469 && (TYPE_MODE (TREE_TYPE (exp))
5470 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5473 /* If any reference in the chain is volatile, the effect is volatile. */
5474 if (TREE_THIS_VOLATILE (exp))
5477 /* If the offset is non-constant already, then we can't assume any
5478 alignment more than the alignment here. */
5479 if (! TREE_CONSTANT (offset))
5480 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5482 exp = TREE_OPERAND (exp, 0);
5486 alignment = MIN (alignment, DECL_ALIGN (exp));
5487 else if (TREE_TYPE (exp) != 0)
5488 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5490 /* If OFFSET is constant, see if we can return the whole thing as a
5491 constant bit position. Otherwise, split it up. */
5492 if (host_integerp (offset, 0)
5493 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5495 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5496 && host_integerp (tem, 0))
5497 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5499 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5502 *palignment = alignment;
5506 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5508 static enum memory_use_mode
5509 get_memory_usage_from_modifier (modifier)
5510 enum expand_modifier modifier;
5516 return MEMORY_USE_RO;
5518 case EXPAND_MEMORY_USE_WO:
5519 return MEMORY_USE_WO;
5521 case EXPAND_MEMORY_USE_RW:
5522 return MEMORY_USE_RW;
5524 case EXPAND_MEMORY_USE_DONT:
5525 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5526 MEMORY_USE_DONT, because they are modifiers to a call of
5527 expand_expr in the ADDR_EXPR case of expand_expr. */
5528 case EXPAND_CONST_ADDRESS:
5529 case EXPAND_INITIALIZER:
5530 return MEMORY_USE_DONT;
5531 case EXPAND_MEMORY_USE_BAD:
5537 /* Given an rtx VALUE that may contain additions and multiplications, return
5538 an equivalent value that just refers to a register, memory, or constant.
5539 This is done by generating instructions to perform the arithmetic and
5540 returning a pseudo-register containing the value.
5542 The returned value may be a REG, SUBREG, MEM or constant. */
5545 force_operand (value, target)
5548 register optab binoptab = 0;
5549 /* Use a temporary to force order of execution of calls to
5553 /* Use subtarget as the target for operand 0 of a binary operation. */
5554 register rtx subtarget = get_subtarget (target);
5556 /* Check for a PIC address load. */
5558 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5559 && XEXP (value, 0) == pic_offset_table_rtx
5560 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5561 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5562 || GET_CODE (XEXP (value, 1)) == CONST))
5565 subtarget = gen_reg_rtx (GET_MODE (value));
5566 emit_move_insn (subtarget, value);
5570 if (GET_CODE (value) == PLUS)
5571 binoptab = add_optab;
5572 else if (GET_CODE (value) == MINUS)
5573 binoptab = sub_optab;
5574 else if (GET_CODE (value) == MULT)
5576 op2 = XEXP (value, 1);
5577 if (!CONSTANT_P (op2)
5578 && !(GET_CODE (op2) == REG && op2 != subtarget))
5580 tmp = force_operand (XEXP (value, 0), subtarget);
5581 return expand_mult (GET_MODE (value), tmp,
5582 force_operand (op2, NULL_RTX),
5588 op2 = XEXP (value, 1);
5589 if (!CONSTANT_P (op2)
5590 && !(GET_CODE (op2) == REG && op2 != subtarget))
5592 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5594 binoptab = add_optab;
5595 op2 = negate_rtx (GET_MODE (value), op2);
5598 /* Check for an addition with OP2 a constant integer and our first
5599 operand a PLUS of a virtual register and something else. In that
5600 case, we want to emit the sum of the virtual register and the
5601 constant first and then add the other value. This allows virtual
5602 register instantiation to simply modify the constant rather than
5603 creating another one around this addition. */
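      /* Added annotation, not in the original source: e.g. for
	 (plus (plus (reg virtual-stack-vars) (reg X)) (const_int 4))
	 we first form virtual-stack-vars + 4 and then add X.  When the
	 virtual register is later instantiated as, say, fp + -16, the
	 constant simply folds to -12 in place instead of requiring a
	 new addition.  */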
5604 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5605 && GET_CODE (XEXP (value, 0)) == PLUS
5606 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5607 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5608 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5610 rtx temp = expand_binop (GET_MODE (value), binoptab,
5611 XEXP (XEXP (value, 0), 0), op2,
5612 subtarget, 0, OPTAB_LIB_WIDEN);
5613 return expand_binop (GET_MODE (value), binoptab, temp,
5614 force_operand (XEXP (XEXP (value, 0), 1), 0),
5615 target, 0, OPTAB_LIB_WIDEN);
5618 tmp = force_operand (XEXP (value, 0), subtarget);
5619 return expand_binop (GET_MODE (value), binoptab, tmp,
5620 force_operand (op2, NULL_RTX),
5621 target, 0, OPTAB_LIB_WIDEN);
5622 /* We give UNSIGNEDP = 0 to expand_binop
5623 because the only operations we are expanding here are signed ones. */
5628 /* Subroutine of expand_expr:
5629 save the non-copied parts (LIST) of an expr (LHS), and return a list
5630 which can restore these values to their previous values,
5631 should something modify their storage. */
5634 save_noncopied_parts (lhs, list)
5641 for (tail = list; tail; tail = TREE_CHAIN (tail))
5642 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5643 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5646 tree part = TREE_VALUE (tail);
5647 tree part_type = TREE_TYPE (part);
5648 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5650 = assign_temp (build_qualified_type (part_type,
5651 (TYPE_QUALS (part_type)
5652 | TYPE_QUAL_CONST)),
5655 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5656 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5657 parts = tree_cons (to_be_saved,
5658 build (RTL_EXPR, part_type, NULL_TREE,
5661 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5666 /* Subroutine of expand_expr:
5667 record the non-copied parts (LIST) of an expr (LHS), and return a list
5668 which specifies the initial values of these parts. */
5671 init_noncopied_parts (lhs, list)
5678 for (tail = list; tail; tail = TREE_CHAIN (tail))
5679 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5680 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5681 else if (TREE_PURPOSE (tail))
5683 tree part = TREE_VALUE (tail);
5684 tree part_type = TREE_TYPE (part);
5685 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5686 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5691 /* Subroutine of expand_expr: return nonzero iff there is no way that
5692 EXP can reference X, which is being modified. TOP_P is nonzero if this
5693 call is going to be used to determine whether we need a temporary
5694 for EXP, as opposed to a recursive call to this function.
5696 It is always safe for this routine to return zero since it merely
5697 searches for optimization opportunities. */
5700 safe_from_p (x, exp, top_p)
5707 static tree save_expr_list;
5710 /* If EXP has varying size, we MUST use a target since we currently
5711 have no way of allocating temporaries of variable size
5712 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5713 So we assume here that something at a higher level has prevented a
5714 clash. This is somewhat bogus, but the best we can do. Only
5715 do this when X is BLKmode and when we are at the top level. */
5716 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5717 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5718 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5719 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5720 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5722 && GET_MODE (x) == BLKmode)
5723 /* If X is in the outgoing argument area, it is always safe. */
5724 || (GET_CODE (x) == MEM
5725 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5726 || (GET_CODE (XEXP (x, 0)) == PLUS
5727 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5730 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5731 find the underlying pseudo. */
5732 if (GET_CODE (x) == SUBREG)
5735 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5739 /* A SAVE_EXPR might appear many times in the expression passed to the
5740 top-level safe_from_p call, and if it has a complex subexpression,
5741 examining it multiple times could result in a combinatorial explosion.
5742 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5743 with optimization took about 28 minutes to compile -- even though it was
5744 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5745 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5746 we have processed. Note that the only test of top_p was above. */
5755 rtn = safe_from_p (x, exp, 0);
5757 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5758 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5763 /* Now look at our tree code and possibly recurse. */
5764 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5767 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5774 if (TREE_CODE (exp) == TREE_LIST)
5775 return ((TREE_VALUE (exp) == 0
5776 || safe_from_p (x, TREE_VALUE (exp), 0))
5777 && (TREE_CHAIN (exp) == 0
5778 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5779 else if (TREE_CODE (exp) == ERROR_MARK)
5780 return 1; /* An already-visited SAVE_EXPR? */
5785 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5789 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5790 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5794 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5795 the expression. If it is set, we conflict iff we are that rtx or
5796 both are in memory. Otherwise, we check all operands of the
5797 expression recursively. */
5799 switch (TREE_CODE (exp))
5802 return (staticp (TREE_OPERAND (exp, 0))
5803 || TREE_STATIC (exp)
5804 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5807 if (GET_CODE (x) == MEM
5808 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5809 get_alias_set (exp)))
5814 /* Assume that the call will clobber all hard registers and
5816 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5817 || GET_CODE (x) == MEM)
5822 /* If a sequence exists, we would have to scan every instruction
5823 in the sequence to see if it was safe. This is probably not
5825 if (RTL_EXPR_SEQUENCE (exp))
5828 exp_rtl = RTL_EXPR_RTL (exp);
5831 case WITH_CLEANUP_EXPR:
5832 exp_rtl = RTL_EXPR_RTL (exp);
5835 case CLEANUP_POINT_EXPR:
5836 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5839 exp_rtl = SAVE_EXPR_RTL (exp);
5843 /* If we've already scanned this, don't do it again. Otherwise,
5844 show we've scanned it and record for clearing the flag if we're
5846 if (TREE_PRIVATE (exp))
5849 TREE_PRIVATE (exp) = 1;
5850 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5852 TREE_PRIVATE (exp) = 0;
5856 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5860 /* The only operand we look at is operand 1. The rest aren't
5861 part of the expression. */
5862 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5864 case METHOD_CALL_EXPR:
5865 /* This takes an rtx argument, but shouldn't appear here. */
5872 /* If we have an rtx, we do not need to scan our operands. */
5876 nops = first_rtl_op (TREE_CODE (exp));
5877 for (i = 0; i < nops; i++)
5878 if (TREE_OPERAND (exp, i) != 0
5879 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5882 /* If this is a language-specific tree code, it may require
5883 special handling. */
5884 if ((unsigned int) TREE_CODE (exp)
5885 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5887 && !(*lang_safe_from_p) (x, exp))
5891 /* If we have an rtl, find any enclosed object. Then see if we conflict
5895 if (GET_CODE (exp_rtl) == SUBREG)
5897 exp_rtl = SUBREG_REG (exp_rtl);
5898 if (GET_CODE (exp_rtl) == REG
5899 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5903 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5904 are memory and they conflict. */
5905 return ! (rtx_equal_p (x, exp_rtl)
5906 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5907 && true_dependence (exp_rtl, GET_MODE (x), x,
5908 rtx_addr_varies_p)));
5911 /* If we reach here, it is safe. */
5915 /* Subroutine of expand_expr: return nonzero iff EXP is an
5916 expression whose type is statically determinable. */
5922 if (TREE_CODE (exp) == PARM_DECL
5923 || TREE_CODE (exp) == VAR_DECL
5924 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5925 || TREE_CODE (exp) == COMPONENT_REF
5926 || TREE_CODE (exp) == ARRAY_REF)
5931 /* Subroutine of expand_expr: return rtx if EXP is a
5932 variable or parameter; else return 0. */
5939 switch (TREE_CODE (exp))
5943 return DECL_RTL (exp);
5949 #ifdef MAX_INTEGER_COMPUTATION_MODE
5952 check_max_integer_computation_mode (exp)
5955 enum tree_code code;
5956 enum machine_mode mode;
5958 /* Strip any NOPs that don't change the mode. */
5960 code = TREE_CODE (exp);
5962 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5963 if (code == NOP_EXPR
5964 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5967 /* First check the type of the overall operation. We need only look at
5968 unary, binary and relational operations. */
5969 if (TREE_CODE_CLASS (code) == '1'
5970 || TREE_CODE_CLASS (code) == '2'
5971 || TREE_CODE_CLASS (code) == '<')
5973 mode = TYPE_MODE (TREE_TYPE (exp));
5974 if (GET_MODE_CLASS (mode) == MODE_INT
5975 && mode > MAX_INTEGER_COMPUTATION_MODE)
5976 internal_error ("unsupported wide integer operation");
5979 /* Check operand of a unary op. */
5980 if (TREE_CODE_CLASS (code) == '1')
5982 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5983 if (GET_MODE_CLASS (mode) == MODE_INT
5984 && mode > MAX_INTEGER_COMPUTATION_MODE)
5985 internal_error ("unsupported wide integer operation");
5988 /* Check operands of a binary/comparison op. */
5989 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5991 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5992 if (GET_MODE_CLASS (mode) == MODE_INT
5993 && mode > MAX_INTEGER_COMPUTATION_MODE)
5994 internal_error ("unsupported wide integer operation");
5996 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5997 if (GET_MODE_CLASS (mode) == MODE_INT
5998 && mode > MAX_INTEGER_COMPUTATION_MODE)
5999 internal_error ("unsupported wide integer operation");
6004 /* expand_expr: generate code for computing expression EXP.
6005 An rtx for the computed value is returned. The value is never null.
6006 In the case of a void EXP, const0_rtx is returned.
6008 The value may be stored in TARGET if TARGET is nonzero.
6009 TARGET is just a suggestion; callers must assume that
6010 the rtx returned may not be the same as TARGET.
6012 If TARGET is CONST0_RTX, it means that the value will be ignored.
6014 If TMODE is not VOIDmode, it suggests generating the
6015 result in mode TMODE. But this is done only when convenient.
6016 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6017 TMODE is just a suggestion; callers must assume that
6018 the rtx returned may not have mode TMODE.
6020 Note that TARGET may have neither TMODE nor MODE. In that case, it
6021 probably will not be used.
6023 If MODIFIER is EXPAND_SUM then when EXP is an addition
6024 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6025 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6026 products as above, or REG or MEM, or constant.
6027 Ordinarily in such cases we would output mul or add instructions
6028 and then return a pseudo reg containing the sum.
6030 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6031 it also marks a label as absolutely required (it can't be dead).
6032 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6033 This is used for outputting expressions used in initializers.
6035 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6036 with a constant address even if that address is not normally legitimate.
6037 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6040 expand_expr (exp, target, tmode, modifier)
6043 enum machine_mode tmode;
6044 enum expand_modifier modifier;
6046 register rtx op0, op1, temp;
6047 tree type = TREE_TYPE (exp);
6048 int unsignedp = TREE_UNSIGNED (type);
6049 register enum machine_mode mode;
6050 register enum tree_code code = TREE_CODE (exp);
6052 rtx subtarget, original_target;
6055 /* Used by check-memory-usage to make modifier read only. */
6056 enum expand_modifier ro_modifier;
6058 /* Handle ERROR_MARK before anybody tries to access its type. */
6059 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6061 op0 = CONST0_RTX (tmode);
6067 mode = TYPE_MODE (type);
6068 /* Use subtarget as the target for operand 0 of a binary operation. */
6069 subtarget = get_subtarget (target);
6070 original_target = target;
6071 ignore = (target == const0_rtx
6072 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6073 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6074 || code == COND_EXPR)
6075 && TREE_CODE (type) == VOID_TYPE));
6077 /* Make a read-only version of the modifier. */
6078 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6079 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6080 ro_modifier = modifier;
6082 ro_modifier = EXPAND_NORMAL;
6084 /* If we are going to ignore this result, we need only do something
6085 if there is a side-effect somewhere in the expression. If there
6086 is, short-circuit the most common cases here. Note that we must
6087 not call expand_expr with anything but const0_rtx in case this
6088 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6092 if (! TREE_SIDE_EFFECTS (exp))
6095 /* Ensure we reference a volatile object even if value is ignored, but
6096 don't do this if all we are doing is taking its address. */
6097 if (TREE_THIS_VOLATILE (exp)
6098 && TREE_CODE (exp) != FUNCTION_DECL
6099 && mode != VOIDmode && mode != BLKmode
6100 && modifier != EXPAND_CONST_ADDRESS)
6102 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6103 if (GET_CODE (temp) == MEM)
6104 temp = copy_to_reg (temp);
6108 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6109 || code == INDIRECT_REF || code == BUFFER_REF)
6110 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6111 VOIDmode, ro_modifier);
6112 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6113 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6115 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6117 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6121 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6122 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6123 /* If the second operand has no side effects, just evaluate
6125 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6126 VOIDmode, ro_modifier);
6127 else if (code == BIT_FIELD_REF)
6129 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6131 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6133 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6141 #ifdef MAX_INTEGER_COMPUTATION_MODE
6142 /* Only check stuff here if the mode we want is different from the mode
6143 of the expression; if it's the same, check_max_integer_computation_mode
6144 will handle it. Do we really need to check this stuff at all? */
6147 && GET_MODE (target) != mode
6148 && TREE_CODE (exp) != INTEGER_CST
6149 && TREE_CODE (exp) != PARM_DECL
6150 && TREE_CODE (exp) != ARRAY_REF
6151 && TREE_CODE (exp) != ARRAY_RANGE_REF
6152 && TREE_CODE (exp) != COMPONENT_REF
6153 && TREE_CODE (exp) != BIT_FIELD_REF
6154 && TREE_CODE (exp) != INDIRECT_REF
6155 && TREE_CODE (exp) != CALL_EXPR
6156 && TREE_CODE (exp) != VAR_DECL
6157 && TREE_CODE (exp) != RTL_EXPR)
6159 enum machine_mode mode = GET_MODE (target);
6161 if (GET_MODE_CLASS (mode) == MODE_INT
6162 && mode > MAX_INTEGER_COMPUTATION_MODE)
6163 internal_error ("unsupported wide integer operation");
6167 && TREE_CODE (exp) != INTEGER_CST
6168 && TREE_CODE (exp) != PARM_DECL
6169 && TREE_CODE (exp) != ARRAY_REF
6170 && TREE_CODE (exp) != ARRAY_RANGE_REF
6171 && TREE_CODE (exp) != COMPONENT_REF
6172 && TREE_CODE (exp) != BIT_FIELD_REF
6173 && TREE_CODE (exp) != INDIRECT_REF
6174 && TREE_CODE (exp) != VAR_DECL
6175 && TREE_CODE (exp) != CALL_EXPR
6176 && TREE_CODE (exp) != RTL_EXPR
6177 && GET_MODE_CLASS (tmode) == MODE_INT
6178 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6179 internal_error ("unsupported wide integer operation");
6181 check_max_integer_computation_mode (exp);
6184 /* If will do cse, generate all results into pseudo registers
6185 since 1) that allows cse to find more things
6186 and 2) otherwise cse could produce an insn the machine
6189 if (! cse_not_expected && mode != BLKmode && target
6190 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6197 tree function = decl_function_context (exp);
6198 /* Handle using a label in a containing function. */
6199 if (function != current_function_decl
6200 && function != inline_function_decl && function != 0)
6202 struct function *p = find_function_data (function);
6203 p->expr->x_forced_labels
6204 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6205 p->expr->x_forced_labels);
6209 if (modifier == EXPAND_INITIALIZER)
6210 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6215 temp = gen_rtx_MEM (FUNCTION_MODE,
6216 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6217 if (function != current_function_decl
6218 && function != inline_function_decl && function != 0)
6219 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6224 if (DECL_RTL (exp) == 0)
6226 error_with_decl (exp, "prior parameter's size depends on `%s'");
6227 return CONST0_RTX (mode);
6230 /* ... fall through ... */
6233 /* If a static var's type was incomplete when the decl was written,
6234 but the type is complete now, lay out the decl now. */
6235 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6236 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6238 layout_decl (exp, 0);
6239 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6242 /* Although static-storage variables start off initialized, according to
6243 ANSI C, a memcpy could overwrite them with uninitialized values. So
6244 we check them too. This also lets us check for read-only variables
6245 accessed via a non-const declaration, in case it won't be detected
6246 any other way (e.g., in an embedded system or OS kernel without memory protection).
6249 Aggregates are not checked here; they're handled elsewhere. */
6250 if (cfun && current_function_check_memory_usage
6252 && GET_CODE (DECL_RTL (exp)) == MEM
6253 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6255 enum memory_use_mode memory_usage;
6256 memory_usage = get_memory_usage_from_modifier (modifier);
6258 in_check_memory_usage = 1;
6259 if (memory_usage != MEMORY_USE_DONT)
6260 emit_library_call (chkr_check_addr_libfunc,
6261 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6262 XEXP (DECL_RTL (exp), 0), Pmode,
6263 GEN_INT (int_size_in_bytes (type)),
6264 TYPE_MODE (sizetype),
6265 GEN_INT (memory_usage),
6266 TYPE_MODE (integer_type_node));
6267 in_check_memory_usage = 0;
6270 /* ... fall through ... */
6274 if (DECL_RTL (exp) == 0)
6277 /* Ensure the variable is marked as used even if it doesn't go through
6278 a parser. If it hasn't been used yet, write out an external definition. */
6280 if (! TREE_USED (exp))
6282 assemble_external (exp);
6283 TREE_USED (exp) = 1;
6286 /* Show we haven't gotten RTL for this yet. */
6289 /* Handle variables inherited from containing functions. */
6290 context = decl_function_context (exp);
6292 /* We treat inline_function_decl as an alias for the current function
6293 because that is the inline function whose vars, types, etc.
6294 are being merged into the current function.
6295 See expand_inline_function. */
6297 if (context != 0 && context != current_function_decl
6298 && context != inline_function_decl
6299 /* If var is static, we don't need a static chain to access it. */
6300 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6301 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6305 /* Mark as non-local and addressable. */
6306 DECL_NONLOCAL (exp) = 1;
6307 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6309 mark_addressable (exp);
6310 if (GET_CODE (DECL_RTL (exp)) != MEM)
6312 addr = XEXP (DECL_RTL (exp), 0);
6313 if (GET_CODE (addr) == MEM)
6314 addr = change_address (addr, Pmode,
6315 fix_lexical_addr (XEXP (addr, 0), exp));
6317 addr = fix_lexical_addr (addr, exp);
6319 temp = change_address (DECL_RTL (exp), mode, addr);
6322 /* This is the case of an array whose size is to be determined
6323 from its initializer, while the initializer is still being parsed. See expand_decl. */
6326 else if (GET_CODE (DECL_RTL (exp)) == MEM
6327 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6328 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6329 XEXP (DECL_RTL (exp), 0));
6331 /* If DECL_RTL is memory, we are in the normal case and either
6332 the address is not valid or it is not a register and -fforce-addr
6333 is specified, get the address into a register. */
6335 else if (GET_CODE (DECL_RTL (exp)) == MEM
6336 && modifier != EXPAND_CONST_ADDRESS
6337 && modifier != EXPAND_SUM
6338 && modifier != EXPAND_INITIALIZER
6339 && (! memory_address_p (DECL_MODE (exp),
6340 XEXP (DECL_RTL (exp), 0))
6342 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6343 temp = change_address (DECL_RTL (exp), VOIDmode,
6344 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6346 /* If we got something, return it. But first, set the alignment
6347 if the address is a register. */
6350 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6351 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6356 /* If the mode of DECL_RTL does not match that of the decl, it
6357 must be a promoted value. We return a SUBREG of the wanted mode,
6358 but mark it so that we know that it was already extended. */
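/* An illustrative sketch (the promotion target here is an assumption of
   this example, not taken from the source): on a machine whose
   PROMOTE_MODE widens QImode locals to SImode, a variable declared
   "signed char c" has DECL_RTL (reg:SI n), and the code below hands back

     (subreg:QI (reg:SI n) 0)

   with SUBREG_PROMOTED_VAR_P set, so later passes know the upper bits
   of the register already hold a valid extension.  */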
6360 if (GET_CODE (DECL_RTL (exp)) == REG
6361 && GET_MODE (DECL_RTL (exp)) != mode)
6363 /* Get the signedness used for this variable. Ensure we get the
6364 same mode we got when the variable was declared. */
6365 if (GET_MODE (DECL_RTL (exp))
6366 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6369 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6370 SUBREG_PROMOTED_VAR_P (temp) = 1;
6371 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6375 return DECL_RTL (exp);
6378 return immed_double_const (TREE_INT_CST_LOW (exp),
6379 TREE_INT_CST_HIGH (exp), mode);
6382 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6383 EXPAND_MEMORY_USE_BAD);
6386 /* If optimized, generate immediate CONST_DOUBLE
6387 which will be turned into memory by reload if necessary.
6389 We used to force a register so that loop.c could see it. But
6390 this does not allow gen_* patterns to perform optimizations with
6391 the constants. It also produces two insns in cases like "x = 1.0;".
6392 On most machines, floating-point constants are not permitted in
6393 many insns, so we'd end up copying it to a register in any case.
6395 Now, we do the copying in expand_binop, if appropriate. */
6396 return immed_real_const (exp);
6400 if (! TREE_CST_RTL (exp))
6401 output_constant_def (exp, 1);
6403 /* TREE_CST_RTL probably contains a constant address.
6404 On RISC machines where a constant address isn't valid,
6405 make some insns to get that address into a register. */
6406 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6407 && modifier != EXPAND_CONST_ADDRESS
6408 && modifier != EXPAND_INITIALIZER
6409 && modifier != EXPAND_SUM
6410 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6412 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6413 return change_address (TREE_CST_RTL (exp), VOIDmode,
6414 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6415 return TREE_CST_RTL (exp);
6417 case EXPR_WITH_FILE_LOCATION:
6420 const char *saved_input_filename = input_filename;
6421 int saved_lineno = lineno;
6422 input_filename = EXPR_WFL_FILENAME (exp);
6423 lineno = EXPR_WFL_LINENO (exp);
6424 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6425 emit_line_note (input_filename, lineno);
6426 /* Possibly avoid switching back and forth here. */
6427 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6428 input_filename = saved_input_filename;
6429 lineno = saved_lineno;
6434 context = decl_function_context (exp);
6436 /* If this SAVE_EXPR was at global context, assume we are an
6437 initialization function and move it into our context. */
6439 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6441 /* We treat inline_function_decl as an alias for the current function
6442 because that is the inline function whose vars, types, etc.
6443 are being merged into the current function.
6444 See expand_inline_function. */
6445 if (context == current_function_decl || context == inline_function_decl)
6448 /* If this is non-local, handle it. */
6451 /* The following call just exists to abort if the context is
6452 not of a containing function. */
6453 find_function_data (context);
6455 temp = SAVE_EXPR_RTL (exp);
6456 if (temp && GET_CODE (temp) == REG)
6458 put_var_into_stack (exp);
6459 temp = SAVE_EXPR_RTL (exp);
6461 if (temp == 0 || GET_CODE (temp) != MEM)
6463 return change_address (temp, mode,
6464 fix_lexical_addr (XEXP (temp, 0), exp));
6466 if (SAVE_EXPR_RTL (exp) == 0)
6468 if (mode == VOIDmode)
6471 temp = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6473 | TYPE_QUAL_CONST)),
6476 SAVE_EXPR_RTL (exp) = temp;
6477 if (!optimize && GET_CODE (temp) == REG)
6478 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6481 /* If the mode of TEMP does not match that of the expression, it
6482 must be a promoted value. We pass store_expr a SUBREG of the
6483 wanted mode but mark it so that we know that it was already
6484 extended. Note that `unsignedp' was modified above in this case. */
6487 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6489 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6490 SUBREG_PROMOTED_VAR_P (temp) = 1;
6491 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6494 if (temp == const0_rtx)
6495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6496 EXPAND_MEMORY_USE_BAD);
6498 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6500 TREE_USED (exp) = 1;
6503 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6504 must be a promoted value. We return a SUBREG of the wanted mode,
6505 but mark it so that we know that it was already extended. */
6507 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6508 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6510 /* Compute the signedness and make the proper SUBREG. */
6511 promote_mode (type, mode, &unsignedp, 0);
6512 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6513 SUBREG_PROMOTED_VAR_P (temp) = 1;
6514 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6518 return SAVE_EXPR_RTL (exp);
6523 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6524 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6528 case PLACEHOLDER_EXPR:
6530 tree placeholder_expr;
6532 /* If there is an object on the head of the placeholder list,
6533 see if any object in it is of type TYPE or a pointer to it. For
6534 further information, see tree.def. */
6535 for (placeholder_expr = placeholder_list;
6536 placeholder_expr != 0;
6537 placeholder_expr = TREE_CHAIN (placeholder_expr))
6539 tree need_type = TYPE_MAIN_VARIANT (type);
6541 tree old_list = placeholder_list;
6544 /* Find the outermost reference that is of the type we want.
6545 If none, see if any object has a type that is a pointer to
6546 the type we want. */
6547 for (elt = TREE_PURPOSE (placeholder_expr);
6548 elt != 0 && object == 0;
6550 = ((TREE_CODE (elt) == COMPOUND_EXPR
6551 || TREE_CODE (elt) == COND_EXPR)
6552 ? TREE_OPERAND (elt, 1)
6553 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6554 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6555 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6556 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6557 ? TREE_OPERAND (elt, 0) : 0))
6558 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6561 for (elt = TREE_PURPOSE (placeholder_expr);
6562 elt != 0 && object == 0;
6564 = ((TREE_CODE (elt) == COMPOUND_EXPR
6565 || TREE_CODE (elt) == COND_EXPR)
6566 ? TREE_OPERAND (elt, 1)
6567 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6568 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6569 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6570 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6571 ? TREE_OPERAND (elt, 0) : 0))
6572 if (POINTER_TYPE_P (TREE_TYPE (elt))
6573 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6575 object = build1 (INDIRECT_REF, need_type, elt);
6579 /* Expand this object skipping the list entries before
6580 it was found in case it is also a PLACEHOLDER_EXPR.
6581 In that case, we want to translate it using subsequent entries. */
6583 placeholder_list = TREE_CHAIN (placeholder_expr);
6584 temp = expand_expr (object, original_target, tmode,
6586 placeholder_list = old_list;
6592 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6595 case WITH_RECORD_EXPR:
6596 /* Put the object on the placeholder list, expand our first operand,
6597 and pop the list. */
6598 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6600 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6601 tmode, ro_modifier);
6602 placeholder_list = TREE_CHAIN (placeholder_list);
6606 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6607 expand_goto (TREE_OPERAND (exp, 0));
6609 expand_computed_goto (TREE_OPERAND (exp, 0));
6613 expand_exit_loop_if_false (NULL,
6614 invert_truthvalue (TREE_OPERAND (exp, 0)));
6617 case LABELED_BLOCK_EXPR:
6618 if (LABELED_BLOCK_BODY (exp))
6619 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6620 /* Should perhaps use expand_label, but this is simpler and safer. */
6621 do_pending_stack_adjust ();
6622 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6625 case EXIT_BLOCK_EXPR:
6626 if (EXIT_BLOCK_RETURN (exp))
6627 sorry ("returned value in block_exit_expr");
6628 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6633 expand_start_loop (1);
6634 expand_expr_stmt (TREE_OPERAND (exp, 0));
6642 tree vars = TREE_OPERAND (exp, 0);
6643 int vars_need_expansion = 0;
6645 /* Need to open a binding contour here because
6646 if there are any cleanups they must be contained here. */
6647 expand_start_bindings (2);
6649 /* Mark the corresponding BLOCK for output in its proper place. */
6650 if (TREE_OPERAND (exp, 2) != 0
6651 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6652 insert_block (TREE_OPERAND (exp, 2));
6654 /* If VARS have not yet been expanded, expand them now. */
6657 if (!DECL_RTL_SET_P (vars))
6659 vars_need_expansion = 1;
6662 expand_decl_init (vars);
6663 vars = TREE_CHAIN (vars);
6666 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6668 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6674 if (RTL_EXPR_SEQUENCE (exp))
6676 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6678 emit_insns (RTL_EXPR_SEQUENCE (exp));
6679 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6681 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6682 free_temps_for_rtl_expr (exp);
6683 return RTL_EXPR_RTL (exp);
6686 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6691 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6692 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6693 EXPAND_MEMORY_USE_BAD);
6697 /* All elts simple constants => refer to a constant in memory. But
6698 if this is a non-BLKmode mode, let it store a field at a time
6699 since that should make a CONST_INT or CONST_DOUBLE when we
6700 fold. Likewise, if we have a target we can use, it is best to
6701 store directly into the target unless the type is large enough
6702 that memcpy will be used. If we are making an initializer and
6703 all operands are constant, put it in memory as well. */
6704 else if ((TREE_STATIC (exp)
6705 && ((mode == BLKmode
6706 && ! (target != 0 && safe_from_p (target, exp, 1)))
6707 || TREE_ADDRESSABLE (exp)
6708 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6709 && (! MOVE_BY_PIECES_P
6710 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6712 && ! mostly_zeros_p (exp))))
6713 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6715 rtx constructor = output_constant_def (exp, 1);
6717 if (modifier != EXPAND_CONST_ADDRESS
6718 && modifier != EXPAND_INITIALIZER
6719 && modifier != EXPAND_SUM
6720 && (! memory_address_p (GET_MODE (constructor),
6721 XEXP (constructor, 0))
6723 && GET_CODE (XEXP (constructor, 0)) != REG)))
6724 constructor = change_address (constructor, VOIDmode,
6725 XEXP (constructor, 0));
6730 /* Handle calls that pass values in multiple non-contiguous
6731 locations. The Irix 6 ABI has examples of this. */
6732 if (target == 0 || ! safe_from_p (target, exp, 1)
6733 || GET_CODE (target) == PARALLEL)
6735 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6737 | (TREE_READONLY (exp)
6738 * TYPE_QUAL_CONST))),
6739 TREE_ADDRESSABLE (exp), 1, 1);
6741 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6742 int_size_in_bytes (TREE_TYPE (exp)));
6748 tree exp1 = TREE_OPERAND (exp, 0);
6750 tree string = string_constant (exp1, &index);
6752 /* Try to optimize reads from const strings. */
6754 && TREE_CODE (string) == STRING_CST
6755 && TREE_CODE (index) == INTEGER_CST
6756 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6757 && GET_MODE_CLASS (mode) == MODE_INT
6758 && GET_MODE_SIZE (mode) == 1
6759 && modifier != EXPAND_MEMORY_USE_WO)
6761 return GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6763 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6764 op0 = memory_address (mode, op0);
6766 if (cfun && current_function_check_memory_usage
6767 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6769 enum memory_use_mode memory_usage;
6770 memory_usage = get_memory_usage_from_modifier (modifier);
6772 if (memory_usage != MEMORY_USE_DONT)
6774 in_check_memory_usage = 1;
6775 emit_library_call (chkr_check_addr_libfunc,
6776 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6777 Pmode, GEN_INT (int_size_in_bytes (type)),
6778 TYPE_MODE (sizetype),
6779 GEN_INT (memory_usage),
6780 TYPE_MODE (integer_type_node));
6781 in_check_memory_usage = 0;
6785 temp = gen_rtx_MEM (mode, op0);
6786 set_mem_attributes (temp, exp, 0);
6788 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6789 here, because, in C and C++, the fact that a location is accessed
6790 through a pointer to const does not mean that the value there can
6791 never change. Languages where it can never change should
6792 also set TREE_STATIC. */
6793 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
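/* For instance (illustrative C, not from the source):

     int x = 1;
     const int *p = &x;
     x = 2;            -- the value at *p changed, though p points to const

   so TREE_READONLY alone must not imply RTX_UNCHANGING_P; only data
   that is also TREE_STATIC can be assumed never to change.  */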
6795 /* If we are writing to this object and its type is a record with
6796 readonly fields, we must mark it as readonly so it will
6797 conflict with readonly references to those fields. */
6798 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6799 RTX_UNCHANGING_P (temp) = 1;
6805 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6809 tree array = TREE_OPERAND (exp, 0);
6810 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6811 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6812 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6815 /* Optimize the special-case of a zero lower bound.
6817 We convert the low_bound to sizetype to avoid some problems
6818 with constant folding. (E.g. suppose the lower bound is 1,
6819 and its mode is QI. Without the conversion, (ARRAY
6820 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6821 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6823 if (! integer_zerop (low_bound))
6824 index = size_diffop (index, convert (sizetype, low_bound));
6826 /* Fold an expression like: "foo"[2].
6827 This is not done in fold so it won't happen inside &.
6828 Don't fold if this is for wide characters since it's too
6829 difficult to do correctly and this is a very rare case. */
6831 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6832 && TREE_CODE (array) == STRING_CST
6833 && TREE_CODE (index) == INTEGER_CST
6834 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6835 && GET_MODE_CLASS (mode) == MODE_INT
6836 && GET_MODE_SIZE (mode) == 1)
6838 return GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6840 /* If this is a constant index into a constant array,
6841 just get the value from the array. Handle both the cases when
6842 we have an explicit constructor and when our operand is a variable
6843 that was declared const. */
6845 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6846 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6847 && TREE_CODE (index) == INTEGER_CST
6848 && 0 > compare_tree_int (index,
6849 list_length (CONSTRUCTOR_ELTS
6850 (TREE_OPERAND (exp, 0)))))
6854 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6855 i = TREE_INT_CST_LOW (index);
6856 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6860 return expand_expr (fold (TREE_VALUE (elem)), target,
6861 tmode, ro_modifier);
6864 else if (optimize >= 1
6865 && modifier != EXPAND_CONST_ADDRESS
6866 && modifier != EXPAND_INITIALIZER
6867 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6868 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6869 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6871 if (TREE_CODE (index) == INTEGER_CST)
6873 tree init = DECL_INITIAL (array);
6875 if (TREE_CODE (init) == CONSTRUCTOR)
6879 for (elem = CONSTRUCTOR_ELTS (init);
6881 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6882 elem = TREE_CHAIN (elem))
6885 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6886 return expand_expr (fold (TREE_VALUE (elem)), target,
6887 tmode, ro_modifier);
6889 else if (TREE_CODE (init) == STRING_CST
6890 && 0 > compare_tree_int (index,
6891 TREE_STRING_LENGTH (init)))
6893 tree type = TREE_TYPE (TREE_TYPE (init));
6894 enum machine_mode mode = TYPE_MODE (type);
6896 if (GET_MODE_CLASS (mode) == MODE_INT
6897 && GET_MODE_SIZE (mode) == 1)
6899 return (GEN_INT (TREE_STRING_POINTER
6900 (init)[TREE_INT_CST_LOW (index)]));
6909 case ARRAY_RANGE_REF:
6910 /* If the operand is a CONSTRUCTOR, we can just extract the
6911 appropriate field if it is present. Don't do this if we have
6912 already written the data since we want to refer to that copy
6913 and varasm.c assumes that's what we'll do. */
6914 if (code == COMPONENT_REF
6915 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6916 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6920 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6921 elt = TREE_CHAIN (elt))
6922 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6923 /* We can normally use the value of the field in the
6924 CONSTRUCTOR. However, if this is a bitfield in
6925 an integral mode that we can fit in a HOST_WIDE_INT,
6926 we must mask only the number of bits in the bitfield,
6927 since this is done implicitly by the constructor. If
6928 the bitfield does not meet either of those conditions,
6929 we can't do this optimization. */
6930 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6931 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6933 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6934 <= HOST_BITS_PER_WIDE_INT))))
6936 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6937 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6939 HOST_WIDE_INT bitsize
6940 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6942 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6944 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6945 op0 = expand_and (op0, op1, target);
6949 enum machine_mode imode
6950 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6952 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6955 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, target, 0);
6957 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, target, 0);
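/* A minimal sketch of the shift trick used above (assuming, for this
   example only, a 32-bit int and an arithmetic right shift):

     int sign_extend_bitfield (int v, int bitsize)
     {
       int shift = 32 - bitsize;
       return (v << shift) >> shift;
     }

   It left-justifies the field, then shifts back so the sign bit is
   replicated; the unsigned case above uses the mask
   ((HOST_WIDE_INT) 1 << bitsize) - 1 instead.  */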
6967 enum machine_mode mode1;
6968 HOST_WIDE_INT bitsize, bitpos;
6971 unsigned int alignment;
6972 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6973 &mode1, &unsignedp, &volatilep,
6976 /* If we got back the original object, something is wrong. Perhaps
6977 we are evaluating an expression too early. In any event, don't
6978 infinitely recurse. */
6982 /* If TEM's type is a union of variable size, pass TARGET to the inner
6983 computation, since it will need a temporary and TARGET is known
6984 to be safe to use. This occurs in unchecked conversion in Ada. */
6986 op0 = expand_expr (tem,
6987 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6988 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6990 ? target : NULL_RTX),
6992 (modifier == EXPAND_INITIALIZER
6993 || modifier == EXPAND_CONST_ADDRESS)
6994 ? modifier : EXPAND_NORMAL);
6996 /* If this is a constant, put it into a register if it is a
6997 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6998 if (CONSTANT_P (op0))
7000 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7001 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7003 op0 = force_reg (mode, op0);
7005 op0 = validize_mem (force_const_mem (mode, op0));
7010 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7012 /* If this object is in a register, put it into memory.
7013 This case can't occur in C, but can in Ada if we have
7014 unchecked conversion of an expression from a scalar type to
7015 an array or record type. */
7016 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7017 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7019 /* If the operand is a SAVE_EXPR, we can deal with this by
7020 forcing the SAVE_EXPR into memory. */
7021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7023 put_var_into_stack (TREE_OPERAND (exp, 0));
7024 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7029 = build_qualified_type (TREE_TYPE (tem),
7030 (TYPE_QUALS (TREE_TYPE (tem))
7031 | TYPE_QUAL_CONST));
7032 rtx memloc = assign_temp (nt, 1, 1, 1);
7034 mark_temp_addr_taken (memloc);
7035 emit_move_insn (memloc, op0);
7040 if (GET_CODE (op0) != MEM)
7043 if (GET_MODE (offset_rtx) != ptr_mode)
7045 #ifdef POINTERS_EXTEND_UNSIGNED
7046 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7048 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7052 /* A constant address in OP0 can have VOIDmode; we must not
7053 call force_reg in that case, so avoid it. */
7054 if (GET_CODE (op0) == MEM
7055 && GET_MODE (op0) == BLKmode
7056 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7058 && (bitpos % bitsize) == 0
7059 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7060 && alignment == GET_MODE_ALIGNMENT (mode1))
7062 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7064 if (GET_CODE (XEXP (temp, 0)) == REG)
7067 op0 = change_address (op0, mode1,
7068 force_reg (GET_MODE (XEXP (temp, 0)),
7073 op0 = change_address (op0, VOIDmode,
7074 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7075 force_reg (ptr_mode,
7079 /* Don't forget about volatility even if this is a bitfield. */
7080 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7082 op0 = copy_rtx (op0);
7083 MEM_VOLATILE_P (op0) = 1;
7086 /* Check the access. */
7087 if (cfun != 0 && current_function_check_memory_usage
7088 && GET_CODE (op0) == MEM)
7090 enum memory_use_mode memory_usage;
7091 memory_usage = get_memory_usage_from_modifier (modifier);
7093 if (memory_usage != MEMORY_USE_DONT)
7098 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7099 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7101 /* Check the access rights of the pointer. */
7102 in_check_memory_usage = 1;
7103 if (size > BITS_PER_UNIT)
7104 emit_library_call (chkr_check_addr_libfunc,
7105 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7106 Pmode, GEN_INT (size / BITS_PER_UNIT),
7107 TYPE_MODE (sizetype),
7108 GEN_INT (memory_usage),
7109 TYPE_MODE (integer_type_node));
7110 in_check_memory_usage = 0;
7114 /* In cases where an aligned union has an unaligned object
7115 as a field, we might be extracting a BLKmode value from
7116 an integer-mode (e.g., SImode) object. Handle this case
7117 by doing the extract into an object as wide as the field
7118 (which we know to be the width of a basic mode), then
7119 storing into memory, and changing the mode to BLKmode. */
7120 if (mode1 == VOIDmode
7121 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7122 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7123 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7124 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7125 /* If the field isn't aligned enough to fetch as a memref,
7126 fetch it as a bit field. */
7127 || (mode1 != BLKmode
7128 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7129 && ((TYPE_ALIGN (TREE_TYPE (tem))
7130 < GET_MODE_ALIGNMENT (mode))
7131 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7132 /* If the type and the field are a constant size and the
7133 size of the type isn't the same size as the bitfield,
7134 we must use bitfield operations. */
7136 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7138 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7141 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7142 && (TYPE_ALIGN (type) > alignment
7143 || bitpos % TYPE_ALIGN (type) != 0)))
7145 enum machine_mode ext_mode = mode;
7147 if (ext_mode == BLKmode
7148 && ! (target != 0 && GET_CODE (op0) == MEM
7149 && GET_CODE (target) == MEM
7150 && bitpos % BITS_PER_UNIT == 0))
7151 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7153 if (ext_mode == BLKmode)
7155 /* In this case, BITPOS must start at a byte boundary and
7156 TARGET, if specified, must be a MEM. */
7157 if (GET_CODE (op0) != MEM
7158 || (target != 0 && GET_CODE (target) != MEM)
7159 || bitpos % BITS_PER_UNIT != 0)
7162 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7164 target = assign_temp (type, 0, 1, 1);
7166 emit_block_move (target, op0,
7167 bitsize == -1 ? expr_size (exp)
7168 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7175 op0 = validize_mem (op0);
7177 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7178 mark_reg_pointer (XEXP (op0, 0), alignment);
7180 op0 = extract_bit_field (op0, bitsize, bitpos,
7181 unsignedp, target, ext_mode, ext_mode,
7183 int_size_in_bytes (TREE_TYPE (tem)));
7185 /* If the result is a record type and BITSIZE is narrower than
7186 the mode of OP0, an integral mode, and this is a big endian
7187 machine, we must put the field into the high-order bits. */
7188 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7189 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7190 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7191 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7192 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7196 if (mode == BLKmode)
7198 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7200 rtx new = assign_temp (nt, 0, 1, 1);
7202 emit_move_insn (new, op0);
7203 op0 = copy_rtx (new);
7204 PUT_MODE (op0, BLKmode);
7210 /* If the result is BLKmode, use that to access the object now as well. */
7212 if (mode == BLKmode)
7215 /* Get a reference to just this component. */
7216 if (modifier == EXPAND_CONST_ADDRESS
7217 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7219 rtx new = gen_rtx_MEM (mode1,
7220 plus_constant (XEXP (op0, 0),
7221 (bitpos / BITS_PER_UNIT)));
7223 MEM_COPY_ATTRIBUTES (new, op0);
7227 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7229 set_mem_attributes (op0, exp, 0);
7230 if (GET_CODE (XEXP (op0, 0)) == REG)
7231 mark_reg_pointer (XEXP (op0, 0), alignment);
7233 MEM_VOLATILE_P (op0) |= volatilep;
7234 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7235 || modifier == EXPAND_CONST_ADDRESS
7236 || modifier == EXPAND_INITIALIZER)
7238 else if (target == 0)
7239 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7241 convert_move (target, op0, unsignedp);
7245 /* Intended for a reference to a buffer of a file-object in Pascal.
7246 But it's not certain that a special tree code will really be
7247 necessary for these. INDIRECT_REF might work for them. */
7253 /* Pascal set IN expression. Algorithm:
7256 rlo = set_low - (set_low % bits_per_word);
7257 the_word = set[(index - rlo) / bits_per_word];
7258 bit_index = index % bits_per_word;
7259 bitmask = 1 << bit_index;
7260 return !!(the_word & bitmask); */
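/* A standalone C sketch of that algorithm (illustrative; it assumes
   8-bit units and takes the set as a plain byte array):

     static int
     set_member (const unsigned char *set, int set_low, int index)
     {
       int rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       int bit_index = index % 8;
       unsigned char bitmask = 1 << bit_index;
       return !!(the_word & bitmask);
     }

   The RTL emitted below performs the same divide, modulus, shift and
   AND, guarded by the range checks on INDEX.  */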
7262 tree set = TREE_OPERAND (exp, 0);
7263 tree index = TREE_OPERAND (exp, 1);
7264 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7265 tree set_type = TREE_TYPE (set);
7266 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7267 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7268 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7269 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7270 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7271 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7272 rtx setaddr = XEXP (setval, 0);
7273 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7275 rtx diff, quo, rem, addr, bit, result;
7277 /* If domain is empty, answer is no. Likewise if index is constant
7278 and out of bounds. */
7279 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7280 && TREE_CODE (set_low_bound) == INTEGER_CST
7281 && tree_int_cst_lt (set_high_bound, set_low_bound))
7282 || (TREE_CODE (index) == INTEGER_CST
7283 && TREE_CODE (set_low_bound) == INTEGER_CST
7284 && tree_int_cst_lt (index, set_low_bound))
7285 || (TREE_CODE (set_high_bound) == INTEGER_CST
7286 && TREE_CODE (index) == INTEGER_CST
7287 && tree_int_cst_lt (set_high_bound, index))))
7291 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7293 /* If we get here, we have to generate the code for both cases
7294 (in range and out of range). */
7296 op0 = gen_label_rtx ();
7297 op1 = gen_label_rtx ();
7299 if (! (GET_CODE (index_val) == CONST_INT
7300 && GET_CODE (lo_r) == CONST_INT))
7302 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7303 GET_MODE (index_val), iunsignedp, 0, op1);
7306 if (! (GET_CODE (index_val) == CONST_INT
7307 && GET_CODE (hi_r) == CONST_INT))
7309 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7310 GET_MODE (index_val), iunsignedp, 0, op1);
7313 /* Calculate the element number of bit zero in the first word of the set. */
7315 if (GET_CODE (lo_r) == CONST_INT)
7316 rlow = GEN_INT (INTVAL (lo_r)
7317 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7319 rlow = expand_binop (index_mode, and_optab, lo_r,
7320 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7321 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7323 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7324 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7326 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7327 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7328 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7329 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7331 addr = memory_address (byte_mode,
7332 expand_binop (index_mode, add_optab, diff,
7333 setaddr, NULL_RTX, iunsignedp,
7336 /* Extract the bit we want to examine. */
7337 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7338 gen_rtx_MEM (byte_mode, addr),
7339 make_tree (TREE_TYPE (index), rem),
7341 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7342 GET_MODE (target) == byte_mode ? target : 0,
7343 1, OPTAB_LIB_WIDEN);
7345 if (result != target)
7346 convert_move (target, result, 1);
7348 /* Output the code to handle the out-of-range case. */
7351 emit_move_insn (target, const0_rtx);
7356 case WITH_CLEANUP_EXPR:
7357 if (RTL_EXPR_RTL (exp) == 0)
7360 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7361 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7363 /* That's it for this cleanup. */
7364 TREE_OPERAND (exp, 2) = 0;
7366 return RTL_EXPR_RTL (exp);
7368 case CLEANUP_POINT_EXPR:
7370 /* Start a new binding layer that will keep track of all cleanup
7371 actions to be performed. */
7372 expand_start_bindings (2);
7374 target_temp_slot_level = temp_slot_level;
7376 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7377 /* If we're going to use this value, load it up now. */
7379 op0 = force_not_mem (op0);
7380 preserve_temp_slots (op0);
7381 expand_end_bindings (NULL_TREE, 0, 0);
7386 /* Check for a built-in function. */
7387 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7388 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7390 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7392 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7393 == BUILT_IN_FRONTEND)
7394 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7396 return expand_builtin (exp, target, subtarget, tmode, ignore);
7399 return expand_call (exp, target, ignore);
7401 case NON_LVALUE_EXPR:
7404 case REFERENCE_EXPR:
7405 if (TREE_OPERAND (exp, 0) == error_mark_node)
7408 if (TREE_CODE (type) == UNION_TYPE)
7410 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7412 /* If both input and output are BLKmode, this conversion
7413 isn't actually doing anything unless we need to make the
7414 alignment stricter. */
7415 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7416 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7417 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7418 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7422 target = assign_temp (type, 0, 1, 1);
7424 if (GET_CODE (target) == MEM)
7425 /* Store data into beginning of memory target. */
7426 store_expr (TREE_OPERAND (exp, 0),
7427 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7429 else if (GET_CODE (target) == REG)
7430 /* Store this field into a union of the proper type. */
7431 store_field (target,
7432 MIN ((int_size_in_bytes (TREE_TYPE
7433 (TREE_OPERAND (exp, 0)))
7435 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7436 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7437 VOIDmode, 0, BITS_PER_UNIT,
7438 int_size_in_bytes (type), 0);
7442 /* Return the entire union. */
7446 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7448 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7451 /* If the signedness of the conversion differs and OP0 is
7452 a promoted SUBREG, clear that indication since we now
7453 have to do the proper extension. */
7454 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7455 && GET_CODE (op0) == SUBREG)
7456 SUBREG_PROMOTED_VAR_P (op0) = 0;
7461 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7462 if (GET_MODE (op0) == mode)
7465 /* If OP0 is a constant, just convert it into the proper mode. */
7466 if (CONSTANT_P (op0))
7468 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7469 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7471 if (modifier == EXPAND_INITIALIZER)
7472 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7476 convert_to_mode (mode, op0,
7477 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7479 convert_move (target, op0,
7480 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7484 /* We come here from MINUS_EXPR when the second operand is a constant. */
7487 this_optab = ! unsignedp && flag_trapv
7488 && (GET_MODE_CLASS(mode) == MODE_INT)
7489 ? addv_optab : add_optab;
7491 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7492 something else, make sure we add the register to the constant and
7493 then to the other thing. This case can occur during strength
7494 reduction and doing it this way will produce better code if the
7495 frame pointer or argument pointer is eliminated.
7497 fold-const.c will ensure that the constant is always in the inner
7498 PLUS_EXPR, so the only case we need to do anything about is if
7499 sp, ap, or fp is our second argument, in which case we must swap
7500 the innermost first argument and our second argument. */
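/* For example (illustrative): given (x + 4) + fp, the swap below
   produces (fp + 4) + x, so that after frame pointer elimination
   fp + 4 folds into a single constant offset.  */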
7502 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7503 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7504 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7505 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7506 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7507 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7509 tree t = TREE_OPERAND (exp, 1);
7511 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7512 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7515 /* If the result is to be ptr_mode and we are adding an integer to
7516 something, we might be forming a constant. So try to use
7517 plus_constant. If it produces a sum and we can't accept it,
7518 use force_operand. This allows P = &ARR[const] to generate
7519 efficient code on machines where a SYMBOL_REF is not a valid address.
7522 If this is an EXPAND_SUM call, always return the sum. */
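/* For example (illustrative, with 4-byte array elements): P = &ARR[10]
   can fold to

     (plus (symbol_ref "ARR") (const_int 40))

   via plus_constant; if that is not a valid address on the target,
   force_operand loads it into a register instead.  */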
7523 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7524 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7526 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7527 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7528 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7532 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7534 /* Use immed_double_const to ensure that the constant is
7535 truncated according to the mode of OP1, then sign extended
7536 to a HOST_WIDE_INT. Using the constant directly can result
7537 in non-canonical RTL in a 64x32 cross compile. */
7539 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7541 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7542 op1 = plus_constant (op1, INTVAL (constant_part));
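/* Concretely (illustrative): in a cross compiler with a 64-bit
   HOST_WIDE_INT targeting 32-bit SImode, the constant 0xffffffff must
   become the canonical sign-extended (const_int -1), not
   (const_int 4294967295); immed_double_const guarantees that.  */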
7543 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7544 op1 = force_operand (op1, target);
7548 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7549 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7550 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7554 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7556 if (! CONSTANT_P (op0))
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7559 VOIDmode, modifier);
7560 /* Don't go to both_summands if modifier
7561 says it's not right to return a PLUS. */
7562 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7566 /* Use immed_double_const to ensure that the constant is
7567 truncated according to the mode of OP1, then sign extended
7568 to a HOST_WIDE_INT. Using the constant directly can result
7569 in non-canonical RTL in a 64x32 cross compile. */
7571 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7573 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7574 op0 = plus_constant (op0, INTVAL (constant_part));
7575 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7576 op0 = force_operand (op0, target);
7581 /* No sense saving up arithmetic to be done
7582 if it's all in the wrong mode to form part of an address.
7583 And force_operand won't know whether to sign-extend or zero-extend. */
7585 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7586 || mode != ptr_mode)
7589 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7593 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7596 /* Make sure any term that's a sum with a constant comes last. */
7597 if (GET_CODE (op0) == PLUS
7598 && CONSTANT_P (XEXP (op0, 1)))
7604 /* If adding to a sum including a constant,
7605 associate it to put the constant outside. */
7606 if (GET_CODE (op1) == PLUS
7607 && CONSTANT_P (XEXP (op1, 1)))
7609 rtx constant_term = const0_rtx;
7611 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7614 /* Ensure that MULT comes first if there is one. */
7615 else if (GET_CODE (op0) == MULT)
7616 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7618 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7620 /* Let's also eliminate constants from op0 if possible. */
7621 op0 = eliminate_constant_term (op0, &constant_term);
7623 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7624 their sum should be a constant. Form it into OP1, since the
7625 result we want will then be OP0 + OP1. */
7627 temp = simplify_binary_operation (PLUS, mode, constant_term,
7632 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7635 /* Put a constant term last and put a multiplication first. */
7636 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7637 temp = op1, op1 = op0, op0 = temp;
7639 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7640 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7643 /* For initializers, we are allowed to return a MINUS of two
7644 symbolic constants. Here we handle all cases when both operands are constant. */
7646 /* Handle difference of two symbolic constants,
7647 for the sake of an initializer. */
7648 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7649 && really_constant_p (TREE_OPERAND (exp, 0))
7650 && really_constant_p (TREE_OPERAND (exp, 1)))
7652 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7653 VOIDmode, ro_modifier);
7654 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7655 VOIDmode, ro_modifier);
7657 /* If the last operand is a CONST_INT, use plus_constant of
7658 the negated constant. Else make the MINUS. */
7659 if (GET_CODE (op1) == CONST_INT)
7660 return plus_constant (op0, - INTVAL (op1));
7662 return gen_rtx_MINUS (mode, op0, op1);
7664 /* Convert A - const to A + (-const). */
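/* For example (illustrative): "a - 5" becomes "a + (-5)", so the
   PLUS_EXPR machinery above (plus_constant, address formation, etc.)
   handles it unchanged.  */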
7665 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7667 tree negated = fold (build1 (NEGATE_EXPR, type,
7668 TREE_OPERAND (exp, 1)));
7670 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7671 /* If we can't negate the constant in TYPE, leave it alone and
7672 expand_binop will negate it for us. We used to try to do it
7673 here in the signed version of TYPE, but that doesn't work
7674 on POINTER_TYPEs. */;
7677 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7681 this_optab = ! unsignedp && flag_trapv
7682 && (GET_MODE_CLASS(mode) == MODE_INT)
7683 ? subv_optab : sub_optab;
7687 /* If first operand is constant, swap them.
7688 Thus the following special case checks need only
7689 check the second operand. */
7690 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7692 register tree t1 = TREE_OPERAND (exp, 0);
7693 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7694 TREE_OPERAND (exp, 1) = t1;
7697 /* Attempt to return something suitable for generating an
7698 indexed address, for machines that support that. */
7700 if (modifier == EXPAND_SUM && mode == ptr_mode
7701 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7702 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7707 /* Apply distributive law if OP0 is x+c. */
7708 if (GET_CODE (op0) == PLUS
7709 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7714 (mode, XEXP (op0, 0),
7715 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7716 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7717 * INTVAL (XEXP (op0, 1))));
7719 if (GET_CODE (op0) != REG)
7720 op0 = force_operand (op0, NULL_RTX);
7721 if (GET_CODE (op0) != REG)
7722 op0 = copy_to_mode_reg (mode, op0);
7725 gen_rtx_MULT (mode, op0,
7726 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7732 /* Check for multiplying things that have been extended
7733 from a narrower type. If this machine supports multiplying
7734 in that narrower type with a result in the desired type,
7735 do it that way, and avoid the explicit type-conversion. */
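/* For example (illustrative, with 16-bit shorts and 32-bit ints):

     short a, b;
     int p = (int) a * (int) b;

   can use a single 16x16->32 widening multiply (e.g. a mulhisi3
   pattern, where the target provides one) instead of extending both
   operands and doing a full 32x32 multiply.  */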
7736 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7737 && TREE_CODE (type) == INTEGER_TYPE
7738 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7739 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7740 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7741 && int_fits_type_p (TREE_OPERAND (exp, 1),
7742 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7743 /* Don't use a widening multiply if a shift will do. */
7744 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7745 > HOST_BITS_PER_WIDE_INT)
7746 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7748 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7749 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7751 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7752 /* If both operands are extended, they must either both
7753 be zero-extended or both be sign-extended. */
7754 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7756 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7758 enum machine_mode innermode
7759 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7760 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7761 ? smul_widen_optab : umul_widen_optab);
7762 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7763 ? umul_widen_optab : smul_widen_optab);
7764 if (mode == GET_MODE_WIDER_MODE (innermode))
7766 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7768 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7769 NULL_RTX, VOIDmode, 0);
7770 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7771 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7774 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7775 NULL_RTX, VOIDmode, 0);
7778 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7779 && innermode == word_mode)
7782 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7783 NULL_RTX, VOIDmode, 0);
7784 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7785 op1 = convert_modes (innermode, mode,
7786 expand_expr (TREE_OPERAND (exp, 1),
7787 NULL_RTX, VOIDmode, 0),
7790 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7791 NULL_RTX, VOIDmode, 0);
7792 temp = expand_binop (mode, other_optab, op0, op1, target,
7793 unsignedp, OPTAB_LIB_WIDEN);
7794 htem = expand_mult_highpart_adjust (innermode,
7795 gen_highpart (innermode, temp),
7797 gen_highpart (innermode, temp),
7799 emit_move_insn (gen_highpart (innermode, temp), htem);
7804 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7805 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7806 return expand_mult (mode, op0, op1, target, unsignedp);
7808 case TRUNC_DIV_EXPR:
7809 case FLOOR_DIV_EXPR:
7811 case ROUND_DIV_EXPR:
7812 case EXACT_DIV_EXPR:
7813 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7815 /* Possible optimization: compute the dividend with EXPAND_SUM;
7816 then, if the divisor is constant, we can optimize the case
7817 where some terms of the dividend have coefficients divisible by it. */
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7819 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7820 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7823 this_optab = flodiv_optab;
7826 case TRUNC_MOD_EXPR:
7827 case FLOOR_MOD_EXPR:
7829 case ROUND_MOD_EXPR:
7830 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7833 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7834 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7836 case FIX_ROUND_EXPR:
7837 case FIX_FLOOR_EXPR:
7839 abort (); /* Not used for C. */
7841 case FIX_TRUNC_EXPR:
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7844 target = gen_reg_rtx (mode);
7845 expand_fix (target, op0, unsignedp);
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7851 target = gen_reg_rtx (mode);
7852 /* expand_float can't figure out what to do if FROM has VOIDmode.
7853 So give it the correct mode. With -O, cse will optimize this. */
7854 if (GET_MODE (op0) == VOIDmode)
7855 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7857 expand_float (target, op0,
7858 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7863 temp = expand_unop (mode,
7864 ! unsignedp && flag_trapv
7865 && (GET_MODE_CLASS(mode) == MODE_INT)
7866 ? negv_optab : neg_optab, op0, target, 0);
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7874 /* Handle complex values specially. */
7875 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7876 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7877 return expand_complex_abs (mode, op0, target, unsignedp);
7879 /* Unsigned abs is simply the operand. Testing here means we don't
7880 risk generating incorrect code below. */
7881 if (TREE_UNSIGNED (type))
7884 return expand_abs (mode, op0, target, unsignedp,
7885 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7889 target = original_target;
7890 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7891 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7892 || GET_MODE (target) != mode
7893 || (GET_CODE (target) == REG
7894 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7895 target = gen_reg_rtx (mode);
7896 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7897 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7899 /* First try to do it with a special MIN or MAX instruction.
7900 If that does not win, use a conditional jump to select the proper value. */
7902 this_optab = (TREE_UNSIGNED (type)
7903 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7904 : (code == MIN_EXPR ? smin_optab : smax_optab));
7906 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7911 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7914 if (GET_CODE (target) == MEM)
7915 target = gen_reg_rtx (mode);
7918 emit_move_insn (target, op0);
7920 op0 = gen_label_rtx ();
7922 /* If this mode is an integer too wide to compare properly,
7923 compare word by word. Rely on cse to optimize constant cases. */
7924 if (GET_MODE_CLASS (mode) == MODE_INT
7925 && ! can_compare_p (GE, mode, ccp_jump))
7927 if (code == MAX_EXPR)
7928 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7929 target, op1, NULL_RTX, op0);
7931 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7932 op1, target, NULL_RTX, op0);
7936 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7937 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7938 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7941 emit_move_insn (target, op1);
7946 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7947 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7954 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7959 /* ??? Can optimize bitwise operations with one arg constant.
7960 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7961 and (a bitwise1 b) bitwise2 b (etc)
7962 but that is probably not worthwhile. */
7964 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7965 boolean values when we want in all cases to compute both of them. In
7966 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7967 as actual zero-or-1 values and then bitwise anding. In cases where
7968 there cannot be any side effects, better code would be made by
7969 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7970 how to recognize those cases. */
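/* For example (illustrative): TRUTH_AND_EXPR of A and B computes

     r = (a != 0) & (b != 0);

   evaluating both operands unconditionally, whereas C's "a && b"
   (TRUTH_ANDIF_EXPR) skips B whenever A is zero.  */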
7972 case TRUTH_AND_EXPR:
7974 this_optab = and_optab;
7979 this_optab = ior_optab;
7982 case TRUTH_XOR_EXPR:
7984 this_optab = xor_optab;
7991 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7993 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7994 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7997 /* Could determine the answer when only additive constants differ. Also,
7998 the addition of one can be handled by changing the condition. */
8005 case UNORDERED_EXPR:
8012 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8016 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8017 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8019 && GET_CODE (original_target) == REG
8020 && (GET_MODE (original_target)
8021 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8023 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8026 if (temp != original_target)
8027 temp = copy_to_reg (temp);
8029 op1 = gen_label_rtx ();
8030 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8031 GET_MODE (temp), unsignedp, 0, op1);
8032 emit_move_insn (temp, const1_rtx);
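/* That is (illustrative): for "r = (foo != 0)" we emit the equivalent
   of

     r = foo;  if (r != 0) r = 1;

   as a compare that jumps around the move of the constant 1.  */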
8037 /* If no set-flag instruction, must generate a conditional
8038 store into a temporary variable. Drop through
8039 and handle this like && and ||. */
8041 case TRUTH_ANDIF_EXPR:
8042 case TRUTH_ORIF_EXPR:
8044 && (target == 0 || ! safe_from_p (target, exp, 1)
8045 /* Make sure we don't have a hard reg (such as function's return
8046 value) live across basic blocks, if not optimizing. */
8047 || (!optimize && GET_CODE (target) == REG
8048 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8049 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8052 emit_clr_insn (target);
8054 op1 = gen_label_rtx ();
8055 jumpifnot (exp, op1);
8058 emit_0_to_1_insn (target);
8061 return ignore ? const0_rtx : target;
8063 case TRUTH_NOT_EXPR:
8064 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8065 /* The parser is careful to generate TRUTH_NOT_EXPR
8066 only with operands that are always zero or one. */
8067 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8068 target, 1, OPTAB_LIB_WIDEN);
8074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8076 return expand_expr (TREE_OPERAND (exp, 1),
8077 (ignore ? const0_rtx : target),
8081 /* If we would have a "singleton" (see below) were it not for a
8082 conversion in each arm, bring that conversion back out. */
8083 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8084 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8085 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8086 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8088 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8089 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8091 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8092 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8093 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8094 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8095 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8096 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8097 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8098 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8099 return expand_expr (build1 (NOP_EXPR, type,
8100 build (COND_EXPR, TREE_TYPE (iftrue),
8101 TREE_OPERAND (exp, 0),
8103 target, tmode, modifier);
8107 /* Note that COND_EXPRs whose type is a structure or union
8108 are required to be constructed to contain assignments of
8109 a temporary variable, so that we can evaluate them here
8110 for side effect only. If type is void, we must do likewise. */
8112 /* If an arm of the branch requires a cleanup,
8113 only that cleanup is performed. */
8116 tree binary_op = 0, unary_op = 0;
8118 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8119 convert it to our mode, if necessary. */
8120 if (integer_onep (TREE_OPERAND (exp, 1))
8121 && integer_zerop (TREE_OPERAND (exp, 2))
8122 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8126 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8131 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8132 if (GET_MODE (op0) == mode)
8136 target = gen_reg_rtx (mode);
8137 convert_move (target, op0, unsignedp);
8141 /* Check for X ? A + B : A. If we have this, we can copy A to the
8142 output and conditionally add B. Similarly for unary operations.
8143 Don't do this if X has side-effects because those side effects
8144 might affect A or B and the "?" operation is a sequence point in
8145 ANSI. (operand_equal_p tests for side effects.) */
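/* Worked example (a sketch): in "cond ? a + b : a", SINGLETON is the
   tree for `a' and BINARY_OP is `a + b'; we can store `a' into the
   target unconditionally and add `b' only when `cond' is true.  If
   `cond' had side effects they might change `a', so the rewrite would
   then be unsafe.  */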
8147 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8148 && operand_equal_p (TREE_OPERAND (exp, 2),
8149 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8150 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8151 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8152 && operand_equal_p (TREE_OPERAND (exp, 1),
8153 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8154 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8155 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8156 && operand_equal_p (TREE_OPERAND (exp, 2),
8157 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8158 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8159 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8160 && operand_equal_p (TREE_OPERAND (exp, 1),
8161 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8162 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8164 /* If we are not to produce a result, we have no target. Otherwise,
8165 if a target was specified use it; it will not be used as an
8166 intermediate target unless it is safe. If no target, use a temporary. */
8171 else if (original_target
8172 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8173 || (singleton && GET_CODE (original_target) == REG
8174 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8175 && original_target == var_rtx (singleton)))
8176 && GET_MODE (original_target) == mode
8177 #ifdef HAVE_conditional_move
8178 && (! can_conditionally_move_p (mode)
8179 || GET_CODE (original_target) == REG
8180 || TREE_ADDRESSABLE (type))
8182 && ! (GET_CODE (original_target) == MEM
8183 && MEM_VOLATILE_P (original_target)))
8184 temp = original_target;
8185 else if (TREE_ADDRESSABLE (type))
8188 temp = assign_temp (type, 0, 0, 1);
8190 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8191 do the test of X as a store-flag operation, do this as
8192 A + ((X != 0) << log C). Similarly for other simple binary
8193 operators. Only do for C == 1 if BRANCH_COST is low. */
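/* For example (a sketch, assuming a store-flag insn exists for the
   comparison): "x ? a + 4 : a" can be emitted as
       a + ((x != 0) << 2)
   with the shift count log2 (4) == 2 computed by tree_log2 below,
   avoiding a conditional branch entirely.  */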
8194 if (temp && singleton && binary_op
8195 && (TREE_CODE (binary_op) == PLUS_EXPR
8196 || TREE_CODE (binary_op) == MINUS_EXPR
8197 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8198 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8199 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8200 : integer_onep (TREE_OPERAND (binary_op, 1)))
8201 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8204 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8205 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8206 ? addv_optab : add_optab)
8207 : TREE_CODE (binary_op) == MINUS_EXPR
8208 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8209 ? subv_optab : sub_optab)
8210 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8213 /* If we had X ? A : A + 1, do this as A + (X == 0).
8215 We have to invert the truth value here and then put it
8216 back later if do_store_flag fails. We cannot simply copy
8217 TREE_OPERAND (exp, 0) to another variable and modify that
8218 because invert_truthvalue can modify the tree pointed to by its argument. */
8220 if (singleton == TREE_OPERAND (exp, 1))
8221 TREE_OPERAND (exp, 0)
8222 = invert_truthvalue (TREE_OPERAND (exp, 0));
8224 result = do_store_flag (TREE_OPERAND (exp, 0),
8225 (safe_from_p (temp, singleton, 1)
8227 mode, BRANCH_COST <= 1);
8229 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8230 result = expand_shift (LSHIFT_EXPR, mode, result,
8231 build_int_2 (tree_log2
8235 (safe_from_p (temp, singleton, 1)
8236 ? temp : NULL_RTX), 0);
8240 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8241 return expand_binop (mode, boptab, op1, result, temp,
8242 unsignedp, OPTAB_LIB_WIDEN);
8244 else if (singleton == TREE_OPERAND (exp, 1))
8245 TREE_OPERAND (exp, 0)
8246 = invert_truthvalue (TREE_OPERAND (exp, 0));
8249 do_pending_stack_adjust ();
8251 op0 = gen_label_rtx ();
8253 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8257 /* If the target conflicts with the other operand of the
8258 binary op, we can't use it. Also, we can't use the target
8259 if it is a hard register, because evaluating the condition
8260 might clobber it. */
8262 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8263 || (GET_CODE (temp) == REG
8264 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8265 temp = gen_reg_rtx (mode);
8266 store_expr (singleton, temp, 0);
8269 expand_expr (singleton,
8270 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8271 if (singleton == TREE_OPERAND (exp, 1))
8272 jumpif (TREE_OPERAND (exp, 0), op0);
8274 jumpifnot (TREE_OPERAND (exp, 0), op0);
8276 start_cleanup_deferral ();
8277 if (binary_op && temp == 0)
8278 /* Just touch the other operand. */
8279 expand_expr (TREE_OPERAND (binary_op, 1),
8280 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8282 store_expr (build (TREE_CODE (binary_op), type,
8283 make_tree (type, temp),
8284 TREE_OPERAND (binary_op, 1)),
8287 store_expr (build1 (TREE_CODE (unary_op), type,
8288 make_tree (type, temp)),
8292 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8293 comparison operator. If we have one of these cases, set the
8294 output to A, branch on A (cse will merge these two references),
8295 then set the output to FOO. */
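/* E.g. (a sketch): for "x > 0 ? x : y" we store `x' into the target,
   jump past the else-arm when "x > 0" is true (cse merges the two
   uses of `x'), and otherwise fall through to store `y'.  */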
8297 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8298 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8299 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8300 TREE_OPERAND (exp, 1), 0)
8301 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8302 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8303 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8305 if (GET_CODE (temp) == REG
8306 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8307 temp = gen_reg_rtx (mode);
8308 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8309 jumpif (TREE_OPERAND (exp, 0), op0);
8311 start_cleanup_deferral ();
8312 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8316 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8317 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8318 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8319 TREE_OPERAND (exp, 2), 0)
8320 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8321 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8322 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8324 if (GET_CODE (temp) == REG
8325 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8326 temp = gen_reg_rtx (mode);
8327 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8328 jumpifnot (TREE_OPERAND (exp, 0), op0);
8330 start_cleanup_deferral ();
8331 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8336 op1 = gen_label_rtx ();
8337 jumpifnot (TREE_OPERAND (exp, 0), op0);
8339 start_cleanup_deferral ();
8341 /* One branch of the cond can be void if it never returns. For
8342 example, A ? throw : E. */
8344 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8345 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8347 expand_expr (TREE_OPERAND (exp, 1),
8348 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8349 end_cleanup_deferral ();
8351 emit_jump_insn (gen_jump (op1));
8354 start_cleanup_deferral ();
8356 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8357 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8359 expand_expr (TREE_OPERAND (exp, 2),
8360 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8363 end_cleanup_deferral ();
8374 /* Something needs to be initialized, but we didn't know
8375 where that thing was when building the tree. For example,
8376 it could be the return value of a function, or a parameter
8377 to a function which is passed on the stack, or a temporary
8378 variable which must be passed by reference.
8380 We guarantee that the expression will either be constructed
8381 or copied into our original target. */
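/* A typical instance (illustrative, not from the source): in C++,
       struct S f ();  ...  S s = f ();
   builds a TARGET_EXPR whose slot is `s', so the call can construct
   its return value directly in `s' with no extra copy.  */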
8383 tree slot = TREE_OPERAND (exp, 0);
8384 tree cleanups = NULL_TREE;
8387 if (TREE_CODE (slot) != VAR_DECL)
8391 target = original_target;
8393 /* Set this here so that if we get a target that refers to a
8394 register variable that's already been used, put_reg_into_stack
8395 knows that it should fix up those uses. */
8396 TREE_USED (slot) = 1;
8400 if (DECL_RTL_SET_P (slot))
8402 target = DECL_RTL (slot);
8403 /* We have already expanded the slot, so don't do anything else. */
8405 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8410 target = assign_temp (type, 2, 0, 1);
8411 /* All temp slots at this level must not conflict. */
8412 preserve_temp_slots (target);
8413 SET_DECL_RTL (slot, target);
8414 if (TREE_ADDRESSABLE (slot))
8415 put_var_into_stack (slot);
8417 /* Since SLOT is not known to the called function
8418 to belong to its stack frame, we must build an explicit
8419 cleanup. This case occurs when we must build up a reference
8420 to pass the reference as an argument. In this case,
8421 it is very likely that such a reference need not be built here. */
8424 if (TREE_OPERAND (exp, 2) == 0)
8425 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8426 cleanups = TREE_OPERAND (exp, 2);
8431 /* This case does occur when expanding a parameter which
8432 needs to be constructed on the stack. The target
8433 is the actual stack address that we want to initialize.
8434 The function we call will perform the cleanup in this case. */
8436 /* If we have already assigned it space, use that space,
8437 not the target that we were passed in, as our target
8438 parameter is only a hint. */
8439 if (DECL_RTL_SET_P (slot))
8441 target = DECL_RTL (slot);
8442 /* We have already expanded the slot, so don't do anything else. */
8444 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8449 SET_DECL_RTL (slot, target);
8450 /* If we must have an addressable slot, then make sure that
8451 the RTL that we just stored in slot is OK. */
8452 if (TREE_ADDRESSABLE (slot))
8453 put_var_into_stack (slot);
8457 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8458 /* Mark it as expanded. */
8459 TREE_OPERAND (exp, 1) = NULL_TREE;
8461 store_expr (exp1, target, 0);
8463 expand_decl_cleanup (NULL_TREE, cleanups);
8470 tree lhs = TREE_OPERAND (exp, 0);
8471 tree rhs = TREE_OPERAND (exp, 1);
8472 tree noncopied_parts = 0;
8473 tree lhs_type = TREE_TYPE (lhs);
8475 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8476 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8478 = init_noncopied_parts (stabilize_reference (lhs),
8479 TYPE_NONCOPIED_PARTS (lhs_type));
8481 while (noncopied_parts != 0)
8483 expand_assignment (TREE_VALUE (noncopied_parts),
8484 TREE_PURPOSE (noncopied_parts), 0, 0);
8485 noncopied_parts = TREE_CHAIN (noncopied_parts);
8492 /* If lhs is complex, expand calls in rhs before computing it.
8493 That's so we don't compute a pointer and save it over a call.
8494 If lhs is simple, compute it first so we can give it as a
8495 target if the rhs is just a call. This avoids an extra temp and copy
8496 and that prevents a partial-subsumption which makes bad code.
8497 Actually we could treat component_ref's of vars like vars. */
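/* E.g. (a sketch): for "p->x = f ()" we expand the call to `f' before
   computing the address of "p->x", so no pointer is held live across
   the call; for plain "v = f ()" we compute `v' first and hand it to
   the call as its target.  */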
8499 tree lhs = TREE_OPERAND (exp, 0);
8500 tree rhs = TREE_OPERAND (exp, 1);
8501 tree noncopied_parts = 0;
8502 tree lhs_type = TREE_TYPE (lhs);
8506 /* Check for |= or &= of a bitfield of size one into another bitfield
8507 of size 1. In this case, (unless we need the result of the
8508 assignment) we can do this more efficiently with a
8509 test followed by an assignment, if necessary.
8511 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8512 things change so we do, this code should be enhanced to handle it. */
8515 && TREE_CODE (lhs) == COMPONENT_REF
8516 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8517 || TREE_CODE (rhs) == BIT_AND_EXPR)
8518 && TREE_OPERAND (rhs, 0) == lhs
8519 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8520 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8521 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8523 rtx label = gen_label_rtx ();
8525 do_jump (TREE_OPERAND (rhs, 1),
8526 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8527 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8528 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8529 (TREE_CODE (rhs) == BIT_IOR_EXPR
8531 : integer_zero_node)),
8533 do_pending_stack_adjust ();
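/* Sketch of the rewrite above for one-bit fields:
       s.a |= s.b;   ==>   if (s.b) s.a = 1;
       s.a &= s.b;   ==>   if (!s.b) s.a = 0;
   a test and conditional store instead of a read-modify-write.  */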
8538 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8539 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8541 = save_noncopied_parts (stabilize_reference (lhs),
8542 TYPE_NONCOPIED_PARTS (lhs_type));
8544 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8545 while (noncopied_parts != 0)
8547 expand_assignment (TREE_PURPOSE (noncopied_parts),
8548 TREE_VALUE (noncopied_parts), 0, 0);
8549 noncopied_parts = TREE_CHAIN (noncopied_parts);
8555 if (!TREE_OPERAND (exp, 0))
8556 expand_null_return ();
8558 expand_return (TREE_OPERAND (exp, 0));
8561 case PREINCREMENT_EXPR:
8562 case PREDECREMENT_EXPR:
8563 return expand_increment (exp, 0, ignore);
8565 case POSTINCREMENT_EXPR:
8566 case POSTDECREMENT_EXPR:
8567 /* Faster to treat as pre-increment if result is not used. */
8568 return expand_increment (exp, ! ignore, ignore);
8571 /* If nonzero, TEMP will be set to the address of something that might
8572 be a MEM corresponding to a stack slot. */
8575 /* Are we taking the address of a nested function? */
8576 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8577 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8578 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8579 && ! TREE_STATIC (exp))
8581 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8582 op0 = force_operand (op0, target);
8584 /* If we are taking the address of something erroneous, just return a zero. */
8586 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8590 /* We make sure to pass const0_rtx down if we came in with
8591 ignore set, to avoid doing the cleanups twice. */
8592 op0 = expand_expr (TREE_OPERAND (exp, 0),
8593 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8594 (modifier == EXPAND_INITIALIZER
8595 ? modifier : EXPAND_CONST_ADDRESS));
8597 /* If we are going to ignore the result, OP0 will have been set
8598 to const0_rtx, so just return it. Don't get confused and
8599 think we are taking the address of the constant. */
8603 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8604 clever and returns a REG when given a MEM. */
8605 op0 = protect_from_queue (op0, 1);
8607 /* We would like the object in memory. If it is a constant, we can
8608 have it be statically allocated into memory. For a non-constant,
8609 we need to allocate some memory and store the value into it. */
8611 if (CONSTANT_P (op0))
8612 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8614 else if (GET_CODE (op0) == MEM)
8616 mark_temp_addr_taken (op0);
8617 temp = XEXP (op0, 0);
8620 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8621 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8622 || GET_CODE (op0) == PARALLEL)
8624 /* If this object is in a register, it must be copied into memory so that we can take its address. */
8626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8627 tree nt = build_qualified_type (inner_type,
8628 (TYPE_QUALS (inner_type)
8629 | TYPE_QUAL_CONST));
8630 rtx memloc = assign_temp (nt, 1, 1, 1);
8632 mark_temp_addr_taken (memloc);
8633 if (GET_CODE (op0) == PARALLEL)
8634 /* Handle calls that pass values in multiple non-contiguous
8635 locations. The Irix 6 ABI has examples of this. */
8636 emit_group_store (memloc, op0,
8637 int_size_in_bytes (inner_type),
8638 TYPE_ALIGN (inner_type));
8640 emit_move_insn (memloc, op0);
8644 if (GET_CODE (op0) != MEM)
8647 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8649 temp = XEXP (op0, 0);
8650 #ifdef POINTERS_EXTEND_UNSIGNED
8651 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8652 && mode == ptr_mode)
8653 temp = convert_memory_address (ptr_mode, temp);
8658 op0 = force_operand (XEXP (op0, 0), target);
8661 if (flag_force_addr && GET_CODE (op0) != REG)
8662 op0 = force_reg (Pmode, op0);
8664 if (GET_CODE (op0) == REG
8665 && ! REG_USERVAR_P (op0))
8666 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8668 /* If we might have had a temp slot, add an equivalent address for it. */
8671 update_temp_slot_address (temp, op0);
8673 #ifdef POINTERS_EXTEND_UNSIGNED
8674 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8675 && mode == ptr_mode)
8676 op0 = convert_memory_address (ptr_mode, op0);
8681 case ENTRY_VALUE_EXPR:
8684 /* COMPLEX type for Extended Pascal & Fortran */
8687 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8690 /* Get the rtx code of the operands. */
8691 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8692 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8695 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8699 /* Move the real (op0) and imaginary (op1) parts to their location. */
8700 emit_move_insn (gen_realpart (mode, target), op0);
8701 emit_move_insn (gen_imagpart (mode, target), op1);
8703 insns = get_insns ();
8706 /* Complex construction should appear as a single unit. */
8707 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8708 each with a separate pseudo as destination.
8709 It's not correct for flow to treat them as a unit. */
8710 if (GET_CODE (target) != CONCAT)
8711 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8719 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8720 return gen_realpart (mode, op0);
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8724 return gen_imagpart (mode, op0);
8728 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8732 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8735 target = gen_reg_rtx (mode);
8739 /* Store the realpart and the negated imagpart to target. */
8740 emit_move_insn (gen_realpart (partmode, target),
8741 gen_realpart (partmode, op0));
8743 imag_t = gen_imagpart (partmode, target);
8744 temp = expand_unop (partmode,
8745 ! unsignedp && flag_trapv
8746 && (GET_MODE_CLASS(partmode) == MODE_INT)
8747 ? negv_optab : neg_optab,
8748 gen_imagpart (partmode, op0), imag_t, 0);
8750 emit_move_insn (imag_t, temp);
8752 insns = get_insns ();
8755 /* Conjugate should appear as a single unit.
8756 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8757 each with a separate pseudo as destination.
8758 It's not correct for flow to treat them as a unit. */
8759 if (GET_CODE (target) != CONCAT)
8760 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8767 case TRY_CATCH_EXPR:
8769 tree handler = TREE_OPERAND (exp, 1);
8771 expand_eh_region_start ();
8773 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8775 expand_eh_region_end_cleanup (handler);
8780 case TRY_FINALLY_EXPR:
8782 tree try_block = TREE_OPERAND (exp, 0);
8783 tree finally_block = TREE_OPERAND (exp, 1);
8784 rtx finally_label = gen_label_rtx ();
8785 rtx done_label = gen_label_rtx ();
8786 rtx return_link = gen_reg_rtx (Pmode);
8787 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8788 (tree) finally_label, (tree) return_link);
8789 TREE_SIDE_EFFECTS (cleanup) = 1;
8791 /* Start a new binding layer that will keep track of all cleanup
8792 actions to be performed. */
8793 expand_start_bindings (2);
8795 target_temp_slot_level = temp_slot_level;
8797 expand_decl_cleanup (NULL_TREE, cleanup);
8798 op0 = expand_expr (try_block, target, tmode, modifier);
8800 preserve_temp_slots (op0);
8801 expand_end_bindings (NULL_TREE, 0, 0);
8802 emit_jump (done_label);
8803 emit_label (finally_label);
8804 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8805 emit_indirect_jump (return_link);
8806 emit_label (done_label);
8810 case GOTO_SUBROUTINE_EXPR:
8812 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8813 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8814 rtx return_address = gen_label_rtx ();
8815 emit_move_insn (return_link,
8816 gen_rtx_LABEL_REF (Pmode, return_address));
8818 emit_label (return_address);
8823 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8826 return get_exception_pointer (cfun);
8829 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8832 /* Here to do an ordinary binary operator, generating an instruction
8833 from the optab already placed in `this_optab'. */
8835 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8838 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8840 temp = expand_binop (mode, this_optab, op0, op1, target,
8841 unsignedp, OPTAB_LIB_WIDEN);
8847 /* Similar to expand_expr, except that we don't specify a target, target
8848 mode, or modifier and we return the alignment of the inner type. This is
8849 used in cases where it is not necessary to align the result to the
8850 alignment of its type as long as we know the alignment of the result, for
8851 example for comparisons of BLKmode values. */
8854 expand_expr_unaligned (exp, palign)
8856 unsigned int *palign;
8859 tree type = TREE_TYPE (exp);
8860 register enum machine_mode mode = TYPE_MODE (type);
8862 /* Default the alignment we return to that of the type. */
8863 *palign = TYPE_ALIGN (type);
8865 /* The only case in which we do anything special is when the resulting mode is BLKmode. */
8867 if (mode != BLKmode)
8868 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8870 switch (TREE_CODE (exp))
8874 case NON_LVALUE_EXPR:
8875 /* Conversions between BLKmode values don't change the underlying
8876 alignment or value. */
8877 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8878 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8882 /* Much of the code for this case is copied directly from expand_expr.
8883 We need to duplicate it here because we will do something different
8884 in the fall-through case, so we need to handle the same exceptions it does. */
8887 tree array = TREE_OPERAND (exp, 0);
8888 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8889 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8890 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8893 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8896 /* Optimize the special case of a zero lower bound.
8898 We convert the low_bound to sizetype to avoid some problems
8899 with constant folding. (E.g. suppose the lower bound is 1,
8900 and its mode is QI. Without the conversion, (ARRAY
8901 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8902 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8904 if (! integer_zerop (low_bound))
8905 index = size_diffop (index, convert (sizetype, low_bound));
8907 /* If this is a constant index into a constant array,
8908 just get the value from the array. Handle both the cases when
8909 we have an explicit constructor and when our operand is a variable
8910 that was declared const. */
8912 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8913 && host_integerp (index, 0)
8914 && 0 > compare_tree_int (index,
8915 list_length (CONSTRUCTOR_ELTS
8916 (TREE_OPERAND (exp, 0)))))
8920 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8921 i = tree_low_cst (index, 0);
8922 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8926 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8929 else if (optimize >= 1
8930 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8931 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8932 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8934 if (TREE_CODE (index) == INTEGER_CST)
8936 tree init = DECL_INITIAL (array);
8938 if (TREE_CODE (init) == CONSTRUCTOR)
8942 for (elem = CONSTRUCTOR_ELTS (init);
8943 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8944 elem = TREE_CHAIN (elem))
8948 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8958 case ARRAY_RANGE_REF:
8959 /* If the operand is a CONSTRUCTOR, we can just extract the
8960 appropriate field if it is present. Don't do this if we have
8961 already written the data since we want to refer to that copy
8962 and varasm.c assumes that's what we'll do. */
8963 if (TREE_CODE (exp) == COMPONENT_REF
8964 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8965 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8969 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8970 elt = TREE_CHAIN (elt))
8971 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8972 /* Note that unlike the case in expand_expr, we know this is
8973 BLKmode and hence not an integer. */
8974 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8978 enum machine_mode mode1;
8979 HOST_WIDE_INT bitsize, bitpos;
8982 unsigned int alignment;
8984 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8985 &mode1, &unsignedp, &volatilep,
8988 /* If we got back the original object, something is wrong. Perhaps
8989 we are evaluating an expression too early. In any event, don't
8990 infinitely recurse. */
8994 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8996 /* If this is a constant, put it into a register if it is a
8997 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
8998 if (CONSTANT_P (op0))
9000 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9002 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9004 op0 = force_reg (inner_mode, op0);
9006 op0 = validize_mem (force_const_mem (inner_mode, op0));
9011 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9013 /* If this object is in a register, put it into memory.
9014 This case can't occur in C, but can in Ada if we have
9015 unchecked conversion of an expression from a scalar type to
9016 an array or record type. */
9017 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9018 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9020 tree nt = build_qualified_type (TREE_TYPE (tem),
9021 (TYPE_QUALS (TREE_TYPE (tem))
9022 | TYPE_QUAL_CONST));
9023 rtx memloc = assign_temp (nt, 1, 1, 1);
9025 mark_temp_addr_taken (memloc);
9026 emit_move_insn (memloc, op0);
9030 if (GET_CODE (op0) != MEM)
9033 if (GET_MODE (offset_rtx) != ptr_mode)
9035 #ifdef POINTERS_EXTEND_UNSIGNED
9036 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9038 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9042 op0 = change_address (op0, VOIDmode,
9043 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9044 force_reg (ptr_mode,
9048 /* Don't forget about volatility even if this is a bitfield. */
9049 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9051 op0 = copy_rtx (op0);
9052 MEM_VOLATILE_P (op0) = 1;
9055 /* Check the access. */
9056 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9061 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9062 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9064 /* Check the access right of the pointer. */
9065 in_check_memory_usage = 1;
9066 if (size > BITS_PER_UNIT)
9067 emit_library_call (chkr_check_addr_libfunc,
9068 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9069 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9070 TYPE_MODE (sizetype),
9071 GEN_INT (MEMORY_USE_RO),
9072 TYPE_MODE (integer_type_node));
9073 in_check_memory_usage = 0;
9076 /* In cases where an aligned union has an unaligned object
9077 as a field, we might be extracting a BLKmode value from
9078 an integer-mode (e.g., SImode) object. Handle this case
9079 by doing the extract into an object as wide as the field
9080 (which we know to be the width of a basic mode), then
9081 storing into memory, and changing the mode to BLKmode.
9082 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9083 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9084 if (mode1 == VOIDmode
9085 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9086 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9087 && (TYPE_ALIGN (type) > alignment
9088 || bitpos % TYPE_ALIGN (type) != 0)))
9090 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9092 if (ext_mode == BLKmode)
9094 /* In this case, BITPOS must start at a byte boundary. */
9095 if (GET_CODE (op0) != MEM
9096 || bitpos % BITS_PER_UNIT != 0)
9099 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9103 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9105 rtx new = assign_temp (nt, 0, 1, 1);
9107 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9108 unsignedp, NULL_RTX, ext_mode,
9109 ext_mode, alignment,
9110 int_size_in_bytes (TREE_TYPE (tem)));
9112 /* If the result is a record type and BITSIZE is narrower than
9113 the mode of OP0, an integral mode, and this is a big endian
9114 machine, we must put the field into the high-order bits. */
9115 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9116 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9117 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9118 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9119 size_int (GET_MODE_BITSIZE
9124 emit_move_insn (new, op0);
9125 op0 = copy_rtx (new);
9126 PUT_MODE (op0, BLKmode);
9130 /* Get a reference to just this component. */
9131 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9133 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9135 /* Adjust the alignment in case the bit position is not
9136 a multiple of the alignment of the inner object. */
9137 while (bitpos % alignment != 0) alignment >>= 1;
9140 if (GET_CODE (XEXP (op0, 0)) == REG)
9141 mark_reg_pointer (XEXP (op0, 0), alignment);
9143 MEM_IN_STRUCT_P (op0) = 1;
9144 MEM_VOLATILE_P (op0) |= volatilep;
9146 *palign = alignment;
9155 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9158 /* Return the tree node if ARG corresponds to a string constant or zero
9159 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9160 in bytes within the string that ARG is accessing. The type of the
9161 offset will be `sizetype'. */
9164 string_constant (arg, ptr_offset)
9170 if (TREE_CODE (arg) == ADDR_EXPR
9171 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9173 *ptr_offset = size_zero_node;
9174 return TREE_OPERAND (arg, 0);
9176 else if (TREE_CODE (arg) == PLUS_EXPR)
9178 tree arg0 = TREE_OPERAND (arg, 0);
9179 tree arg1 = TREE_OPERAND (arg, 1);
9184 if (TREE_CODE (arg0) == ADDR_EXPR
9185 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9187 *ptr_offset = convert (sizetype, arg1);
9188 return TREE_OPERAND (arg0, 0);
9190 else if (TREE_CODE (arg1) == ADDR_EXPR
9191 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9193 *ptr_offset = convert (sizetype, arg0);
9194 return TREE_OPERAND (arg1, 0);
9201 /* Expand code for a post- or pre- increment or decrement
9202 and return the RTX for the result.
9203 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9206 expand_increment (exp, post, ignore)
9210 register rtx op0, op1;
9211 register rtx temp, value;
9212 register tree incremented = TREE_OPERAND (exp, 0);
9213 optab this_optab = add_optab;
9215 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9216 int op0_is_copy = 0;
9217 int single_insn = 0;
9218 /* 1 means we can't store into OP0 directly,
9219 because it is a subreg narrower than a word,
9220 and we don't dare clobber the rest of the word. */
9223 /* Stabilize any component ref that might need to be
9224 evaluated more than once below. */
9226 || TREE_CODE (incremented) == BIT_FIELD_REF
9227 || (TREE_CODE (incremented) == COMPONENT_REF
9228 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9229 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9230 incremented = stabilize_reference (incremented);
9231 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9232 ones into save exprs so that they don't accidentally get evaluated
9233 more than once by the code below. */
9234 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9235 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9236 incremented = save_expr (incremented);
9238 /* Compute the operands as RTX.
9239 Note whether OP0 is the actual lvalue or a copy of it:
9240 I believe it is a copy iff it is a register or subreg
9241 and insns were generated in computing it. */
9243 temp = get_last_insn ();
9244 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9246 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9247 in place but instead must do sign- or zero-extension during assignment,
9248 so we copy it into a new register and let the code below use it as a copy.
9251 Note that we can safely modify this SUBREG since it is known not to be
9252 shared (it was made by the expand_expr call above). */
9254 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9257 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9261 else if (GET_CODE (op0) == SUBREG
9262 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9264 /* We cannot increment this SUBREG in place. If we are
9265 post-incrementing, get a copy of the old value. Otherwise,
9266 just mark that we cannot increment in place. */
9268 op0 = copy_to_reg (op0);
9273 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9274 && temp != get_last_insn ());
9275 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9276 EXPAND_MEMORY_USE_BAD);
9278 /* Decide whether incrementing or decrementing. */
9279 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9280 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9281 this_optab = sub_optab;
9283 /* Convert decrement by a constant into a negative increment. */
9284 if (this_optab == sub_optab
9285 && GET_CODE (op1) == CONST_INT)
9287 op1 = GEN_INT (-INTVAL (op1));
9288 this_optab = add_optab;
9291 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9292 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9294 /* For a preincrement, see if we can do this with a single instruction. */
9297 icode = (int) this_optab->handlers[(int) mode].insn_code;
9298 if (icode != (int) CODE_FOR_nothing
9299 /* Make sure that OP0 is valid for operands 0 and 1
9300 of the insn we want to queue. */
9301 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9302 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9303 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9307 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9308 then we cannot just increment OP0. We must therefore contrive to
9309 increment the original value. Then, for postincrement, we can return
9310 OP0 since it is a copy of the old value. For preincrement, expand here
9311 unless we can do it with a single insn.
9313 Likewise if storing directly into OP0 would clobber high bits
9314 we need to preserve (bad_subreg). */
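/* E.g. (illustrative): for "c++" where `c' is a promoted variable held
   in a SUBREG, OP0 is only a copy of the value, so we must expand an
   assignment of "c + 1" back to `c' itself rather than increment the
   copy in place.  */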
9315 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9317 /* This is the easiest way to increment the value wherever it is.
9318 Problems with multiple evaluation of INCREMENTED are prevented
9319 because either (1) it is a component_ref or preincrement,
9320 in which case it was stabilized above, or (2) it is an array_ref
9321 with constant index in an array in a register, which is
9322 safe to reevaluate. */
9323 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9324 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9325 ? MINUS_EXPR : PLUS_EXPR),
9328 TREE_OPERAND (exp, 1));
9330 while (TREE_CODE (incremented) == NOP_EXPR
9331 || TREE_CODE (incremented) == CONVERT_EXPR)
9333 newexp = convert (TREE_TYPE (incremented), newexp);
9334 incremented = TREE_OPERAND (incremented, 0);
9337 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9338 return post ? op0 : temp;
9343 /* We have a true reference to the value in OP0.
9344 If there is an insn to add or subtract in this mode, queue it.
9345 Queueing the increment insn avoids the register shuffling
9346 that often results if we must increment now and first save
9347 the old value for subsequent use. */
9349 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9350 op0 = stabilize (op0);
9353 icode = (int) this_optab->handlers[(int) mode].insn_code;
9354 if (icode != (int) CODE_FOR_nothing
9355 /* Make sure that OP0 is valid for operands 0 and 1
9356 of the insn we want to queue. */
9357 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9358 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9360 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9361 op1 = force_reg (mode, op1);
9363 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9365 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9367 rtx addr = (general_operand (XEXP (op0, 0), mode)
9368 ? force_reg (Pmode, XEXP (op0, 0))
9369 : copy_to_reg (XEXP (op0, 0)));
9372 op0 = change_address (op0, VOIDmode, addr);
9373 temp = force_reg (GET_MODE (op0), op0);
9374 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9375 op1 = force_reg (mode, op1);
9377 /* The increment queue is LIFO, thus we have to `queue'
9378 the instructions in reverse order. */
9379 enqueue_insn (op0, gen_move_insn (op0, temp));
9380 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9385 /* Preincrement, or we can't increment with one simple insn. */
9387 /* Save a copy of the value before inc or dec, to return it later. */
9388 temp = value = copy_to_reg (op0);
9390 /* Arrange to return the incremented value. */
9391 /* Copy the rtx because expand_binop will protect from the queue,
9392 and the results of that would be invalid for us to return
9393 if our caller does emit_queue before using our result. */
9394 temp = copy_rtx (value = op0);
9396 /* Increment however we can. */
9397 op1 = expand_binop (mode, this_optab, value, op1,
9398 current_function_check_memory_usage ? NULL_RTX : op0,
9399 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9400 /* Make sure the value is stored into OP0. */
9402 emit_move_insn (op0, op1);
9407 /* At the start of a function, record that we have no previously-pushed
9408 arguments waiting to be popped. */
9411 init_pending_stack_adjust ()
9413 pending_stack_adjust = 0;
9416 /* When exiting from a function, if safe, clear out any pending stack adjust
9417 so the adjustment won't get done.
9419 Note, if the current function calls alloca, then it must have a
9420 frame pointer regardless of the value of flag_omit_frame_pointer. */
9423 clear_pending_stack_adjust ()
9425 #ifdef EXIT_IGNORE_STACK
9427 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9428 && EXIT_IGNORE_STACK
9429 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9430 && ! flag_inline_functions)
9432 stack_pointer_delta -= pending_stack_adjust,
9433 pending_stack_adjust = 0;
9438 /* Pop any previously-pushed arguments that have not been popped yet. */
9441 do_pending_stack_adjust ()
9443 if (inhibit_defer_pop == 0)
9445 if (pending_stack_adjust != 0)
9446 adjust_stack (GEN_INT (pending_stack_adjust));
9447 pending_stack_adjust = 0;
9451 /* Expand conditional expressions. */
9453 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9454 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9458 jumpifnot (exp, label)
9462 do_jump (exp, label, NULL_RTX);
9465 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9472 do_jump (exp, NULL_RTX, label);
9475 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9476 the result is zero, or IF_TRUE_LABEL if the result is one.
9477 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9478 meaning fall through in that case.
9480 do_jump always does any pending stack adjust except when it does not
9481 actually perform a jump. An example where there is no jump
9482 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9484 This function is responsible for optimizing cases such as
9485 &&, || and comparison operators in EXP. */
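/* E.g. (a sketch): for "if (a && b)" do_jump tests `a' and jumps
   straight to the false label when `a' is zero, so `b' is never
   evaluated and no 0/1 value is ever materialized in a register.  */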
9488 do_jump (exp, if_false_label, if_true_label)
9490 rtx if_false_label, if_true_label;
9492 register enum tree_code code = TREE_CODE (exp);
9493 /* Some cases need to create a label to jump to
9494 in order to properly fall through.
9495 These cases set DROP_THROUGH_LABEL nonzero. */
9496 rtx drop_through_label = 0;
9500 enum machine_mode mode;
9502 #ifdef MAX_INTEGER_COMPUTATION_MODE
9503 check_max_integer_computation_mode (exp);
9514 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9520 /* This is not true with #pragma weak */
9522 /* The address of something can never be zero. */
9524 emit_jump (if_true_label);
9529 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9530 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9531 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9532 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9535 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9537 if ((TYPE_PRECISION (TREE_TYPE (exp))
9538 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9540 case NON_LVALUE_EXPR:
9541 case REFERENCE_EXPR:
9546 /* These cannot change zero->non-zero or vice versa. */
9547 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9550 case WITH_RECORD_EXPR:
9551 /* Put the object on the placeholder list, recurse through our first
9552 operand, and pop the list. */
9553 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9555 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9556 placeholder_list = TREE_CHAIN (placeholder_list);
9560 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9561 a test, and can take more if the test is eliminated. */
9563 /* Reduce to minus. */
9564 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9565 TREE_OPERAND (exp, 0),
9566 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9567 TREE_OPERAND (exp, 1))));
9568 /* Process as MINUS. */
9572 /* Non-zero iff operands of minus differ. */
9573 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9574 TREE_OPERAND (exp, 0),
9575 TREE_OPERAND (exp, 1)),
9576 NE, NE, if_false_label, if_true_label);
9580 /* If we are AND'ing with a small constant, do this comparison in the
9581 smallest type that fits. If the machine doesn't have comparisons
9582 that small, it will be converted back to the wider comparison.
9583 This helps if we are testing the sign bit of a narrower object.
9584 combine can't do this for us because it can't know whether a
9585 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
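/* Example (a sketch): "x & 0x80" with a 32-bit `x' only depends on
   bit 7, so the test can be narrowed to a QImode compare of the low
   byte; tree_floor_log2 below finds the highest set bit, from which
   the narrowest mode containing the mask is chosen.  */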
9587 if (! SLOW_BYTE_ACCESS
9588 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9589 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9590 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9591 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9592 && (type = type_for_mode (mode, 1)) != 0
9593 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9594 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9595 != CODE_FOR_nothing))
9597 do_jump (convert (type, exp), if_false_label, if_true_label);
9602 case TRUTH_NOT_EXPR:
9603 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9606 case TRUTH_ANDIF_EXPR:
9607 if (if_false_label == 0)
9608 if_false_label = drop_through_label = gen_label_rtx ();
9609 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9610 start_cleanup_deferral ();
9611 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9612 end_cleanup_deferral ();
9615 case TRUTH_ORIF_EXPR:
9616 if (if_true_label == 0)
9617 if_true_label = drop_through_label = gen_label_rtx ();
9618 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9619 start_cleanup_deferral ();
9620 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9621 end_cleanup_deferral ();
9626 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9627 preserve_temp_slots (NULL_RTX);
9631 do_pending_stack_adjust ();
9632 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9638 case ARRAY_RANGE_REF:
9640 HOST_WIDE_INT bitsize, bitpos;
9642 enum machine_mode mode;
9646 unsigned int alignment;
9648 /* Get description of this reference. We don't actually care
9649 about the underlying object here. */
9650 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9651 &unsignedp, &volatilep, &alignment);
9653 type = type_for_size (bitsize, unsignedp);
9654 if (! SLOW_BYTE_ACCESS
9655 && type != 0 && bitsize >= 0
9656 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9657 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9658 != CODE_FOR_nothing))
9660 do_jump (convert (type, exp), if_false_label, if_true_label);
9667 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9668 if (integer_onep (TREE_OPERAND (exp, 1))
9669 && integer_zerop (TREE_OPERAND (exp, 2)))
9670 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9672 else if (integer_zerop (TREE_OPERAND (exp, 1))
9673 && integer_onep (TREE_OPERAND (exp, 2)))
9674 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9678 register rtx label1 = gen_label_rtx ();
9679 drop_through_label = gen_label_rtx ();
9681 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9683 start_cleanup_deferral ();
9684 /* Now the THEN-expression. */
9685 do_jump (TREE_OPERAND (exp, 1),
9686 if_false_label ? if_false_label : drop_through_label,
9687 if_true_label ? if_true_label : drop_through_label);
9688 /* In case the do_jump just above never jumps. */
9689 do_pending_stack_adjust ();
9690 emit_label (label1);
9692 /* Now the ELSE-expression. */
9693 do_jump (TREE_OPERAND (exp, 2),
9694 if_false_label ? if_false_label : drop_through_label,
9695 if_true_label ? if_true_label : drop_through_label);
9696 end_cleanup_deferral ();
9702 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9704 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9705 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9707 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9708 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9711 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9712 fold (build (EQ_EXPR, TREE_TYPE (exp),
9713 fold (build1 (REALPART_EXPR,
9714 TREE_TYPE (inner_type),
9716 fold (build1 (REALPART_EXPR,
9717 TREE_TYPE (inner_type),
9719 fold (build (EQ_EXPR, TREE_TYPE (exp),
9720 fold (build1 (IMAGPART_EXPR,
9721 TREE_TYPE (inner_type),
9723 fold (build1 (IMAGPART_EXPR,
9724 TREE_TYPE (inner_type),
9726 if_false_label, if_true_label);
9729 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9730 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9732 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9733 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9734 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9736 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9742 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9744 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9745 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9747 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9748 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9751 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9752 fold (build (NE_EXPR, TREE_TYPE (exp),
9753 fold (build1 (REALPART_EXPR,
9754 TREE_TYPE (inner_type),
9756 fold (build1 (REALPART_EXPR,
9757 TREE_TYPE (inner_type),
9759 fold (build (NE_EXPR, TREE_TYPE (exp),
9760 fold (build1 (IMAGPART_EXPR,
9761 TREE_TYPE (inner_type),
9763 fold (build1 (IMAGPART_EXPR,
9764 TREE_TYPE (inner_type),
9766 if_false_label, if_true_label);
9769 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9770 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9772 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9773 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9774 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9776 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9781 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9782 if (GET_MODE_CLASS (mode) == MODE_INT
9783 && ! can_compare_p (LT, mode, ccp_jump))
9784 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9786 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9790 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9791 if (GET_MODE_CLASS (mode) == MODE_INT
9792 && ! can_compare_p (LE, mode, ccp_jump))
9793 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9795 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9799 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9800 if (GET_MODE_CLASS (mode) == MODE_INT
9801 && ! can_compare_p (GT, mode, ccp_jump))
9802 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9804 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9808 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9809 if (GET_MODE_CLASS (mode) == MODE_INT
9810 && ! can_compare_p (GE, mode, ccp_jump))
9811 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9813 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9816 case UNORDERED_EXPR:
9819 enum rtx_code cmp, rcmp;
9822 if (code == UNORDERED_EXPR)
9823 cmp = UNORDERED, rcmp = ORDERED;
9825 cmp = ORDERED, rcmp = UNORDERED;
9826 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9829 if (! can_compare_p (cmp, mode, ccp_jump)
9830 && (can_compare_p (rcmp, mode, ccp_jump)
9831 /* If the target doesn't provide either UNORDERED or ORDERED
9832 comparisons, canonicalize on UNORDERED for the library. */
9833 || rcmp == UNORDERED))
9837 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9839 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9844 enum rtx_code rcode1;
9845 enum tree_code tcode2;
9869 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9870 if (can_compare_p (rcode1, mode, ccp_jump))
9871 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9875 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9876 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9879 /* If the target doesn't support combined unordered
9880 compares, decompose into UNORDERED + comparison. */
9881 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9882 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9883 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9884 do_jump (exp, if_false_label, if_true_label);
9890 /* Recognize jumps on calls of the form __builtin_expect (<test>, 0)
9891 and __builtin_expect (<test>, 1).
9893 We need to do this here, so that <test> is not converted to a SCC
9894 operation on machines that use condition code registers and COMPARE
9895 like the PowerPC, and then the jump is done based on whether the SCC
9896 operation produced a 1 or 0. */
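/* E.g. (illustrative): for "if (__builtin_expect (p == NULL, 0))" we
   want a direct conditional branch on "p == NULL" annotated as
   unlikely, not a store-flag producing a 0/1 value that is then
   compared and branched on.  */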
9898 /* Check for a built-in function. */
9899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9901 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9902 tree arglist = TREE_OPERAND (exp, 1);
9904 if (TREE_CODE (fndecl) == FUNCTION_DECL
9905 && DECL_BUILT_IN (fndecl)
9906 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9907 && arglist != NULL_TREE
9908 && TREE_CHAIN (arglist) != NULL_TREE)
9910 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9913 if (seq != NULL_RTX)
9920 /* fall through and generate the normal code. */
9924 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9926 /* This is not needed any more and causes poor code since it causes
9927 comparisons and tests from non-SI objects to have different code sequences. */
9929 /* Copy to register to avoid generating bad insns by cse
9930 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9931 if (!cse_not_expected && GET_CODE (temp) == MEM)
9932 temp = copy_to_reg (temp);
9934 do_pending_stack_adjust ();
9935 /* Do any postincrements in the expression that was tested. */
9938 if (GET_CODE (temp) == CONST_INT
9939 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9940 || GET_CODE (temp) == LABEL_REF)
9942 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9946 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9947 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9948 /* Note swapping the labels gives us not-equal. */
9949 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9950 else if (GET_MODE (temp) != VOIDmode)
9951 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9952 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9953 GET_MODE (temp), NULL_RTX, 0,
9954 if_false_label, if_true_label);
9959 if (drop_through_label)
9961 /* If do_jump produces code that might be jumped around,
9962 do any stack adjusts from that code, before the place
9963 where control merges in. */
9964 do_pending_stack_adjust ();
9965 emit_label (drop_through_label);
9969 /* Given a comparison expression EXP for values too wide to be compared
9970 with one insn, test the comparison and jump to the appropriate label.
9971 The code of EXP is ignored; we always test GT if SWAP is 0,
9972 and LT if SWAP is 1. */
9975 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9978 rtx if_false_label, if_true_label;
9980 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9981 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9982 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9983 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9985 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9988 /* Compare OP0 with OP1, word at a time, in mode MODE.
9989 UNSIGNEDP says to do unsigned comparison.
9990 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
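/* Sketch for a two-word signed compare OP0 > OP1 (e.g. DImode on a
   32-bit target), high-order word first:
       high word:  GT  (signed)    -> true label
       high word:  NE              -> false label
       low word:   GT  (unsigned)  -> true label
   falling through to the false label when all words are equal.  */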
9993 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9994 enum machine_mode mode;
9997 rtx if_false_label, if_true_label;
9999 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10000 rtx drop_through_label = 0;
10003 if (! if_true_label || ! if_false_label)
10004 drop_through_label = gen_label_rtx ();
10005 if (! if_true_label)
10006 if_true_label = drop_through_label;
10007 if (! if_false_label)
10008 if_false_label = drop_through_label;
10010 /* Compare a word at a time, high order first. */
10011 for (i = 0; i < nwords; i++)
10013 rtx op0_word, op1_word;
10015 if (WORDS_BIG_ENDIAN)
10017 op0_word = operand_subword_force (op0, i, mode);
10018 op1_word = operand_subword_force (op1, i, mode);
10022 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10023 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10026 /* All but the high-order word must be compared as unsigned. */
10027 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10028 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10029 NULL_RTX, if_true_label);
10031 /* Consider lower words only if these are equal. */
10032 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10033 NULL_RTX, 0, NULL_RTX, if_false_label);
10036 if (if_false_label)
10037 emit_jump (if_false_label);
10038 if (drop_through_label)
10039 emit_label (drop_through_label);
10042 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10043 with one insn, test the comparison and jump to the appropriate label. */
10046 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10048 rtx if_false_label, if_true_label;
10050 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10051 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10052 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10053 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10055 rtx drop_through_label = 0;
10057 if (! if_false_label)
10058 drop_through_label = if_false_label = gen_label_rtx ();
10060 for (i = 0; i < nwords; i++)
10061 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10062 operand_subword_force (op1, i, mode),
10063 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10064 word_mode, NULL_RTX, 0, if_false_label,
10068 emit_jump (if_true_label);
10069 if (drop_through_label)
10070 emit_label (drop_through_label);

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
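
/* For a four-word operand the "or" path above computes a single value
   part = w0 | w1 | w2 | w3 and tests it against zero once, instead of
   emitting four separate compare-and-jump sequences.  The word-by-word
   fallback only runs when expand_binop cannot produce the IOR and
   returns zero.  */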

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
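
/* As a worked instance of the disabled transformation above: a signed
   QImode test "x == -1" would become the unsigned test "x == 0xff",
   since INTVAL (op1) is -1, GET_MODE_MASK (QImode) is 0xff, and
   -1 & 0xff is 0xff.  */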

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
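
  /* The reversal above is deliberately skipped for floating-point modes:
     with IEEE NaNs, "! (a < b)" is not equivalent to "a >= b", because
     both comparisons are false when either operand is a NaN.  */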

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
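
  /* For example, "(x & 8) != 0" has BITNUM == 3, so the block above
     expands to the equivalent of "(x >> 3) & 1"; the EQ form
     "(x & 8) == 0" gets an extra XOR, yielding "((x >> 3) ^ 1) & 1".
     When the tested bit is the sign bit (BITNUM == TYPE_PRECISION - 1),
     the unsigned right shift alone leaves 0 or 1 and the final AND is
     omitted.  */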

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
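
/* The set/jump/set fallback just emitted computes, for a comparison
   such as "x < y" with INVERT clear, the equivalent of:

        target = 1;
        if (x < y) goto label;
        target = 0;
     label:

   with the two constants exchanged when INVERT is set.  */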

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);
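
  /* Concretely: for a switch whose cases run from 3 to 7, the caller
     has already emitted index = i - 3 and passes RANGE == 4.  The
     single unsigned test above then performs both bounds checks at
     once (assuming a 32-bit index for illustration):

        i == 2  =>  index == 0xffffffff (wraps)  =>  GTU  =>  default
        i == 3  =>  index == 0                   =>  in range
        i == 7  =>  index == 4                   =>  in range
        i == 8  =>  index == 5                   =>  GTU  =>  default  */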

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */