1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "typeclass.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
/* NOTE(review): the matching #endif for the conditional above, and the
   #else/#endif lines of the STACK_PUSH_CODE block below, are not present
   in this listing -- the file appears to be an excerpt with dropped
   lines.  Do not treat these as complete preprocessor blocks.  */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since same_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
/* NOTE(review): the comment above ends mid-sentence ("as the TOP_P")
   because its continuation line is missing from this excerpt; in the
   original it continues "parameter."  */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
/* NOTE(review): the variable declaration the comment above describes
   (presumably cse_not_expected or similar) is missing from this excerpt.  */
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
/* NOTE(review): both structure definitions below are truncated in this
   excerpt -- the struct headers, braces, and most field declarations
   (addresses, autoinc flags, reverse flag, etc.) are missing.  Only a
   few fields survive; do not rely on this as the full layout.  */
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
114 /* This structure is used by store_by_pieces to describe the clear to
117 struct store_by_pieces
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
/* Forward declarations of the static helpers defined later in expr.c.
   NOTE(review): several of the PARAMS argument lists below are cut off
   mid-declaration by missing lines in this excerpt (e.g. the
   move_by_pieces_ninsns and do_compare_and_jump prototypes); consult the
   original file before editing.  */
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
/* NOTE(review): the #ifndef MOVE_RATIO guard, the fallback definition for
   targets without movstr patterns, and the closing #endif lines are
   missing from this excerpt.  */
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
/* NOTE(review): this is the body of init_expr_once, but the function
   header line, local declarations (mem, mem1, reg, insn, pat, regno,
   num_clobbers), braces, and several statements are missing from this
   excerpt.  The surviving logic probes, for every machine mode, whether
   a (set reg mem) / (set mem reg) pattern is recognized, recording the
   result in direct_load[] / direct_store[].  */
222 enum machine_mode mode;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 if (! HARD_REGNO_MODE_OK (regno, mode))
258 reg = gen_rtx_REG (mode, regno);
/* NOTE(review): the "SET_SRC (pat) = mem;" line that pairs with the
   first direct_load probe below is missing from this excerpt; each
   probe sets SET_SRC/SET_DEST and asks recog() whether the move is
   directly supported.  */
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
285 /* This is run at the start of compiling a function. */
/* NOTE(review): fragments of three small functions follow (init_expr,
   a GC-marking routine for struct expr_status, and
   finish_expr_for_function); their headers, braces, and several
   statements are missing from this excerpt.  */
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
297 apply_args_value = 0;
303 struct expr_status *p;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
321 /* Small sanity check that the queue is empty at the end of a function. */
324 finish_expr_for_function ()
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
/* NOTE(review): the return type ("static rtx"), parameter declarations,
   and braces of enqueue_insn are missing from this excerpt.  The body
   pushes a new QUEUED rtx onto the pending_chain list and returns it.  */
341 enqueue_insn (var, body)
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): the return type, parameter declarations, braces, and a
   number of interior lines of protect_from_queue are missing from this
   excerpt; the surviving code shows the main cases (MEM of QUEUED,
   recursive protection of unary and PLUS/MULT operands, and the
   three QUEUED outcomes at the end).  */
365 protect_from_queue (x, modify)
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
415 else if (code == PLUS || code == MULT)
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433 /* If the increment has happened and a pre-increment copy exists,
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
442 return QUEUED_COPY (x);
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
/* NOTE(review): the function header (queued_subexp_p), its switch
   statement over the rtx code, and the QUEUED/default cases are missing
   from this excerpt; only the recursive MEM and binary-operator cases
   survive below.  */
454 register enum rtx_code code = GET_CODE (x);
460 return queued_subexp_p (XEXP (x, 0));
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
471 /* Perform all the pending incrementations. */
/* NOTE(review): the header of emit_queue and its braces are missing from
   this excerpt.  The loop drains pending_chain, emitting each QUEUED
   body (unwrapping a SEQUENCE to record its first insn) and recording
   the emitted insn in QUEUED_INSN.  */
477 while ((p = pending_chain))
479 rtx body = QUEUED_BODY (p);
481 if (GET_CODE (body) == SEQUENCE)
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
/* NOTE(review): convert_move is heavily truncated in this excerpt --
   the return type, local declarations, braces, the closing abort(),
   and many interior lines (including the bodies of most #ifdef'd
   truncation cases and the float libcall switch) are missing.  The
   surviving structure is: strip redundant promoted SUBREGs; handle
   same-mode and vector moves; float-to-float via truncation insns or
   libcalls; widening beyond a word (direct, via word, or word-by-word
   with sign/zero fill); multiword truncation; partial-integer (PQI/PSI/
   PDI) pointer conversions; single-word truncation and extension; and
   special truncate insns per mode pair.  Treat every claim below as
   subject to the missing lines.  */
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
515 if (to_real != from_real)
518 /* If FROM is a SUBREG that indicates that we have already done at least
519 the required extension, strip it. We don't handle such SUBREGs as
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
534 emit_move_insn (to, from);
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
546 to = gen_rtx_SUBREG (from_mode, to, 0);
548 emit_move_insn (to, from);
552 if (to_real != from_real)
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
565 emit_unop_insn (code, to, from, UNKNOWN)_
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* NOTE(review): the switch over from_mode/to_mode that selects the
   libcall below is missing from this excerpt; only the assignment arms
   survive.  */
721 libcall = extendsfdf2_libfunc;
725 libcall = extendsfxf2_libfunc;
729 libcall = extendsftf2_libfunc;
741 libcall = truncdfsf2_libfunc;
745 libcall = extenddfxf2_libfunc;
749 libcall = extenddftf2_libfunc;
761 libcall = truncxfsf2_libfunc;
765 libcall = truncxfdf2_libfunc;
777 libcall = trunctfsf2_libfunc;
781 libcall = trunctfdf2_libfunc;
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
800 insns = get_insns ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
807 /* Now both modes are integers. */
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
847 /* No special multiword conversion insn; do it by hand. */
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
860 lowpart_mode = from_mode;
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
867 /* Compute the value to put in each remaining word. */
869 fill_value = const0_rtx;
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
906 insns = get_insns ();
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 #endif /* HAVE_truncqipqi2 */
945 if (from_mode == PQImode)
947 if (to_mode != QImode)
949 from = convert_to_mode (QImode, from, unsignedp);
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 #endif /* HAVE_extendpqiqi2 */
965 if (to_mode == PSImode)
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
976 #endif /* HAVE_truncsipsi2 */
980 if (from_mode == PSImode)
982 if (to_mode != SImode)
984 from = convert_to_mode (SImode, from, unsignedp);
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1002 #endif /* HAVE_zero_extendpsisi2 */
1007 if (to_mode == PDImode)
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1018 #endif /* HAVE_truncdipdi2 */
1022 if (from_mode == PDImode)
1024 if (to_mode != DImode)
1026 from = convert_to_mode (DImode, from, unsignedp);
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1037 #endif /* HAVE_extendpdidi2 */
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1071 emit_unop_insn (code, to, from, equiv_code);
1076 enum machine_mode intermediate;
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1106 emit_move_insn (to, tmp);
1111 /* Support special truncate insns for certain modes. */
1113 if (from_mode == DImode && to_mode == SImode)
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == HImode)
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == DImode && to_mode == QImode)
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == HImode)
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == SImode && to_mode == QImode)
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == HImode && to_mode == QImode)
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == DImode)
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == SImode)
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == HImode)
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 if (from_mode == TImode && to_mode == QImode)
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1253 /* Mode combination is not recognized. */
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): the "rtx" return-type line, the declarations of the x
   and unsignedp parameters, and the braces are missing from this
   excerpt; the whole body is a delegation to convert_modes with
   OLDMODE == VOIDmode.  */
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): the return type, parameter declarations, the "register
   rtx temp;" local, braces, and several interior lines of convert_modes
   are missing from this excerpt.  Surviving logic: strip redundant
   promoted SUBREGs; return X unchanged when modes already agree;
   special-case a negative CONST_INT widened unsigned to twice
   HOST_BITS_PER_WIDE_INT; use gen_lowpart when safe; otherwise fall
   through to a fresh pseudo filled via convert_move.  */
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1308 if (mode == oldmode)
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314 the wrong if the constant appears negative. What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1321 HOST_WIDE_INT val = INTVAL (x);
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1326 int width = GET_MODE_BITSIZE (oldmode);
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* NOTE(review): the "if (! unsignedp" condition line that governs the
   sign-extension below is missing from this excerpt.  */
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1371 return gen_lowpart (mode, x);
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1379 /* This macro determines the largest unit size that
1380 move_by_pieces can use. */
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1395 When TO is NULL, the emit_single_push_insn is used to push the
1398 ALIGN is maximum alignment we can assume. */
1401 move_by_pieces (to, from, len, align)
1403 unsigned HOST_WIDE_INT len;
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1413 data.from_addr = from_addr;
1416 to_addr = XEXP (to, 0);
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1429 #ifdef STACK_GROWS_DOWNWARD
1435 data.to_addr = to_addr;
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1503 if (mode == VOIDmode)
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1543 if (mode == VOIDmode)
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 unsigned int size = GET_MODE_SIZE (mode);
1571 while (data->len >= size)
1574 data->offset -= size;
1578 if (data->autinc_to)
1580 to1 = gen_rtx_MEM (mode, data->to_addr);
1581 MEM_COPY_ATTRIBUTES (to1, data->to);
1584 to1 = change_address (data->to, mode,
1585 plus_constant (data->to_addr, data->offset));
1588 if (data->autinc_from)
1590 from1 = gen_rtx_MEM (mode, data->from_addr);
1591 MEM_COPY_ATTRIBUTES (from1, data->from);
1594 from1 = change_address (data->from, mode,
1595 plus_constant (data->from_addr, data->offset));
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1603 emit_insn ((*genfun) (to1, from1));
1605 emit_single_push_insn (mode, from1, NULL);
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1612 if (! data->reverse)
1613 data->offset += size;
1619 /* Emit code to move a block Y to a block X.
1620 This may be done with string-move instructions,
1621 with multiple scalar move instructions, or with a library call.
1623 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1625 SIZE is an rtx that says how long they are.
1626 ALIGN is the maximum alignment we can assume they have.
1628 Return the address of the new block, if memcpy is called and returns it,
1632 emit_block_move (x, y, size, align)
1638 #ifdef TARGET_MEM_FUNCTIONS
1640 tree call_expr, arg_list;
1643 if (GET_MODE (x) != BLKmode)
1646 if (GET_MODE (y) != BLKmode)
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
1651 size = protect_from_queue (size, 0);
1653 if (GET_CODE (x) != MEM)
1655 if (GET_CODE (y) != MEM)
1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1661 move_by_pieces (x, y, INTVAL (size), align);
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1669 enum machine_mode mode;
1671 /* Since this is a move insn, we don't care about volatility. */
1674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1675 mode = GET_MODE_WIDER_MODE (mode))
1677 enum insn_code code = movstr_optab[(int) mode];
1678 insn_operand_predicate_fn pred;
1680 if (code != CODE_FOR_nothing
1681 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
1683 returned by the macro, it will definitely be less than the
1684 actual mode mask. */
1685 && ((GET_CODE (size) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1687 <= (GET_MODE_MASK (mode) >> 1)))
1688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1690 || (*pred) (x, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1692 || (*pred) (y, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1694 || (*pred) (opalign, VOIDmode)))
1697 rtx last = get_last_insn ();
1700 op2 = convert_to_mode (mode, size, 1);
1701 pred = insn_data[(int) code].operand[2].predicate;
1702 if (pred != 0 && ! (*pred) (op2, mode))
1703 op2 = copy_to_mode_reg (mode, op2);
1705 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1713 delete_insns_since (last);
1719 /* X, Y, or SIZE may have been passed through protect_from_queue.
1721 It is unsafe to save the value generated by protect_from_queue
1722 and reuse it later. Consider what happens if emit_queue is
1723 called before the return value from protect_from_queue is used.
1725 Expansion of the CALL_EXPR below will call emit_queue before
1726 we are finished emitting RTL for argument setup. So if we are
1727 not careful we could get the wrong value for an argument.
1729 To avoid this problem we go ahead and emit code to copy X, Y &
1730 SIZE into new pseudos. We can then place those new pseudos
1731 into an RTL_EXPR and use them later, even after a call to
1734 Note this is not strictly needed for library calls since they
1735 do not call emit_queue before loading their arguments. However,
1736 we may need to have library calls call emit_queue in the future
1737 since failing to do so could cause problems for targets which
1738 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1739 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1740 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1745 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node));
1747 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 /* It is incorrect to use the libcall calling conventions to call
1752 memcpy in this context.
1754 This could be a user call to memcpy and the user may wish to
1755 examine the return value from memcpy.
1757 For targets where libcalls and normal calls have different conventions
1758 for returning pointers, we could end up generating incorrect code.
1760 So instead of using a libcall sequence we build up a suitable
1761 CALL_EXPR and expand the call in the normal fashion. */
1762 if (fn == NULL_TREE)
1766 /* This was copied from except.c, I don't know if all this is
1767 necessary in this context or not. */
1768 fn = get_identifier ("memcpy");
1769 fntype = build_pointer_type (void_type_node);
1770 fntype = build_function_type (fntype, NULL_TREE);
1771 fn = build_decl (FUNCTION_DECL, fn, fntype);
1772 ggc_add_tree_root (&fn, 1);
1773 DECL_EXTERNAL (fn) = 1;
1774 TREE_PUBLIC (fn) = 1;
1775 DECL_ARTIFICIAL (fn) = 1;
1776 make_decl_rtl (fn, NULL);
1777 assemble_external (fn);
1780 /* We need to make an argument list for the function call.
1782 memcpy has three arguments, the first two are void * addresses and
1783 the last is a size_t byte count for the copy. */
1785 = build_tree_list (NULL_TREE,
1786 make_tree (build_pointer_type (void_type_node), x));
1787 TREE_CHAIN (arg_list)
1788 = build_tree_list (NULL_TREE,
1789 make_tree (build_pointer_type (void_type_node), y));
1790 TREE_CHAIN (TREE_CHAIN (arg_list))
1791 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1792 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1794 /* Now we have to build up the CALL_EXPR itself. */
1795 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1796 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1797 call_expr, arg_list, NULL_TREE);
1798 TREE_SIDE_EFFECTS (call_expr) = 1;
1800 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1802 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1803 VOIDmode, 3, y, Pmode, x, Pmode,
1804 convert_to_mode (TYPE_MODE (integer_type_node), size,
1805 TREE_UNSIGNED (integer_type_node)),
1806 TYPE_MODE (integer_type_node));
1813 /* Copy all or part of a value X into registers starting at REGNO.
1814 The number of registers to be filled is NREGS. */
1817 move_block_to_reg (regno, x, nregs, mode)
1821 enum machine_mode mode;
1824 #ifdef HAVE_load_multiple
1832 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1833 x = validize_mem (force_const_mem (mode, x));
1835 /* See if the machine can do this with a load multiple insn. */
1836 #ifdef HAVE_load_multiple
1837 if (HAVE_load_multiple)
1839 last = get_last_insn ();
1840 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1848 delete_insns_since (last);
1852 for (i = 0; i < nregs; i++)
1853 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1854 operand_subword_force (x, i, mode));
1857 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1858 The number of registers to be filled is NREGS. SIZE indicates the number
1859 of bytes in the object X. */
1862 move_block_from_reg (regno, x, nregs, size)
1869 #ifdef HAVE_store_multiple
1873 enum machine_mode mode;
1878 /* If SIZE is that of a mode no bigger than a word, just use that
1879 mode's store operation. */
1880 if (size <= UNITS_PER_WORD
1881 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1883 emit_move_insn (change_address (x, mode, NULL),
1884 gen_rtx_REG (mode, regno));
1888 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1889 to the left before storing to memory. Note that the previous test
1890 doesn't handle all cases (e.g. SIZE == 3). */
1891 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1893 rtx tem = operand_subword (x, 0, 1, BLKmode);
1899 shift = expand_shift (LSHIFT_EXPR, word_mode,
1900 gen_rtx_REG (word_mode, regno),
1901 build_int_2 ((UNITS_PER_WORD - size)
1902 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1903 emit_move_insn (tem, shift);
1907 /* See if the machine can do this with a store multiple insn. */
1908 #ifdef HAVE_store_multiple
1909 if (HAVE_store_multiple)
1911 last = get_last_insn ();
1912 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1920 delete_insns_since (last);
1924 for (i = 0; i < nregs; i++)
1926 rtx tem = operand_subword (x, i, 1, BLKmode);
1931 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1935 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1936 registers represented by a PARALLEL. SSIZE represents the total size of
1937 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1939 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1940 the balance will be in what would be the low-order memory addresses, i.e.
1941 left justified for big endian, right justified for little endian. This
1942 happens to be true for the targets currently using this support. If this
1943 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1947 emit_group_load (dst, orig_src, ssize, align)
1955 if (GET_CODE (dst) != PARALLEL)
1958 /* Check for a NULL entry, used to indicate that the parameter goes
1959 both on the stack and in registers. */
1960 if (XEXP (XVECEXP (dst, 0, 0), 0))
1965 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1967 /* Process the pieces. */
1968 for (i = start; i < XVECLEN (dst, 0); i++)
1970 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1971 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1972 unsigned int bytelen = GET_MODE_SIZE (mode);
1975 /* Handle trailing fragments that run over the size of the struct. */
1976 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1978 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1979 bytelen = ssize - bytepos;
1984 /* If we won't be loading directly from memory, protect the real source
1985 from strange tricks we might play; but make sure that the source can
1986 be loaded directly into the destination. */
1988 if (GET_CODE (orig_src) != MEM
1989 && (!CONSTANT_P (orig_src)
1990 || (GET_MODE (orig_src) != mode
1991 && GET_MODE (orig_src) != VOIDmode)))
1993 if (GET_MODE (orig_src) == VOIDmode)
1994 src = gen_reg_rtx (mode);
1996 src = gen_reg_rtx (GET_MODE (orig_src));
1997 emit_move_insn (src, orig_src);
2000 /* Optimize the access just a bit. */
2001 if (GET_CODE (src) == MEM
2002 && align >= GET_MODE_ALIGNMENT (mode)
2003 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2004 && bytelen == GET_MODE_SIZE (mode))
2006 tmps[i] = gen_reg_rtx (mode);
2007 emit_move_insn (tmps[i],
2008 change_address (src, mode,
2009 plus_constant (XEXP (src, 0),
2012 else if (GET_CODE (src) == CONCAT)
2015 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2016 tmps[i] = XEXP (src, 0);
2017 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2018 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2019 tmps[i] = XEXP (src, 1);
2023 else if (CONSTANT_P (src)
2024 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2027 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2029 mode, mode, align, ssize);
2031 if (BYTES_BIG_ENDIAN && shift)
2032 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2038 /* Copy the extracted pieces into the proper (probable) hard regs. */
2039 for (i = start; i < XVECLEN (dst, 0); i++)
2040 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2043 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044 registers represented by a PARALLEL. SSIZE represents the total size of
2045 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2048 emit_group_store (orig_dst, src, ssize, align)
2056 if (GET_CODE (src) != PARALLEL)
2059 /* Check for a NULL entry, used to indicate that the parameter goes
2060 both on the stack and in registers. */
2061 if (XEXP (XVECEXP (src, 0, 0), 0))
2066 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2068 /* Copy the (probable) hard regs into pseudos. */
2069 for (i = start; i < XVECLEN (src, 0); i++)
2071 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2072 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2073 emit_move_insn (tmps[i], reg);
2077 /* If we won't be storing directly into memory, protect the real destination
2078 from strange tricks we might play. */
2080 if (GET_CODE (dst) == PARALLEL)
2084 /* We can get a PARALLEL dst if there is a conditional expression in
2085 a return statement. In that case, the dst and src are the same,
2086 so no action is necessary. */
2087 if (rtx_equal_p (dst, src))
2090 /* It is unclear if we can ever reach here, but we may as well handle
2091 it. Allocate a temporary, and split this into a store/load to/from
2094 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2095 emit_group_store (temp, src, ssize, align);
2096 emit_group_load (dst, temp, ssize, align);
2099 else if (GET_CODE (dst) != MEM)
2101 dst = gen_reg_rtx (GET_MODE (orig_dst));
2102 /* Make life a bit easier for combine. */
2103 emit_move_insn (dst, const0_rtx);
2106 /* Process the pieces. */
2107 for (i = start; i < XVECLEN (src, 0); i++)
2109 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2110 enum machine_mode mode = GET_MODE (tmps[i]);
2111 unsigned int bytelen = GET_MODE_SIZE (mode);
2113 /* Handle trailing fragments that run over the size of the struct. */
2114 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2116 if (BYTES_BIG_ENDIAN)
2118 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2119 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2120 tmps[i], 0, OPTAB_WIDEN);
2122 bytelen = ssize - bytepos;
2125 /* Optimize the access just a bit. */
2126 if (GET_CODE (dst) == MEM
2127 && align >= GET_MODE_ALIGNMENT (mode)
2128 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2129 && bytelen == GET_MODE_SIZE (mode))
2130 emit_move_insn (change_address (dst, mode,
2131 plus_constant (XEXP (dst, 0),
2135 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2136 mode, tmps[i], align, ssize);
2141 /* Copy from the pseudo into the (probable) hard reg. */
2142 if (GET_CODE (dst) == REG)
2143 emit_move_insn (orig_dst, dst);
2146 /* Generate code to copy a BLKmode object of TYPE out of a
2147 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2148 is null, a stack temporary is created. TGTBLK is returned.
2150 The primary purpose of this routine is to handle functions
2151 that return BLKmode structures in registers. Some machines
2152 (the PA for example) want to return all small structures
2153 in registers regardless of the structure's alignment. */
2156 copy_blkmode_from_reg (tgtblk, srcreg, type)
2161 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2162 rtx src = NULL, dst = NULL;
2163 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2164 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2168 tgtblk = assign_temp (build_qualified_type (type,
2170 | TYPE_QUAL_CONST)),
2172 preserve_temp_slots (tgtblk);
2175 /* This code assumes srcreg is at least a full word. If it isn't,
2176 copy it into a new pseudo which is a full word. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
2185 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2186 big_endian_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2189 /* Copy the structure BITSIZE bites at a time.
2191 We could probably emit more efficient code for machines which do not use
2192 strict alignment, but it doesn't seem worth the effort at the current
2194 for (bitpos = 0, xbitpos = big_endian_correction;
2195 bitpos < bytes * BITS_PER_UNIT;
2196 bitpos += bitsize, xbitpos += bitsize)
2198 /* We need a new source operand each time xbitpos is on a
2199 word boundary and when xbitpos == big_endian_correction
2200 (the first time through). */
2201 if (xbitpos % BITS_PER_WORD == 0
2202 || xbitpos == big_endian_correction)
2203 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2205 /* We need a new destination operand each time bitpos is on
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 xbitpos for the destination store (left justified). */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, word_mode, word_mode,
2216 bitsize, BITS_PER_WORD),
2217 bitsize, BITS_PER_WORD);
2223 /* Add a USE expression for REG to the (possibly empty) list pointed
2224 to by CALL_FUSAGE. REG must denote a hard register. */
2227 use_reg (call_fusage, reg)
2228 rtx *call_fusage, reg;
2230 if (GET_CODE (reg) != REG
2231 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2235 = gen_rtx_EXPR_LIST (VOIDmode,
2236 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2239 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2240 starting at REGNO. All of these registers must be hard registers. */
2243 use_regs (call_fusage, regno, nregs)
2250 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2253 for (i = 0; i < nregs; i++)
2254 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2257 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2258 PARALLEL REGS. This is for calls that pass values in multiple
2259 non-contiguous locations. The Irix 6 ABI has examples of this. */
2262 use_group_regs (call_fusage, regs)
2268 for (i = 0; i < XVECLEN (regs, 0); i++)
2270 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2272 /* A NULL entry means the parameter goes both on the stack and in
2273 registers. This can also be a MEM for targets that pass values
2274 partially on the stack and partially in registers. */
2275 if (reg != 0 && GET_CODE (reg) == REG)
2276 use_reg (call_fusage, reg);
2282 can_store_by_pieces (len, constfun, constfundata, align)
2283 unsigned HOST_WIDE_INT len;
2284 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 unsigned HOST_WIDE_INT max_size, l;
2289 HOST_WIDE_INT offset = 0;
2290 enum machine_mode mode, tmode;
2291 enum insn_code icode;
2295 if (! MOVE_BY_PIECES_P (len, align))
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2302 /* We would first store what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2306 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2311 max_size = MOVE_MAX_PIECES + 1;
2312 while (max_size > 1)
2314 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2316 if (GET_MODE_SIZE (tmode) < max_size)
2319 if (mode == VOIDmode)
2322 icode = mov_optab->handlers[(int) mode].insn_code;
2323 if (icode != CODE_FOR_nothing
2324 && align >= GET_MODE_ALIGNMENT (mode))
2326 unsigned int size = GET_MODE_SIZE (mode);
2333 cst = (*constfun) (constfundata, offset, mode);
2334 if (!LEGITIMATE_CONSTANT_P (cst))
2344 max_size = GET_MODE_SIZE (mode);
2347 /* The code above should have handled everything. */
2355 /* Generate several move instructions to store LEN bytes generated by
2356 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2357 pointer which will be passed as argument in every CONSTFUN call.
2358 ALIGN is maximum alignment we can assume. */
2361 store_by_pieces (to, len, constfun, constfundata, align)
2363 unsigned HOST_WIDE_INT len;
2364 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 struct store_by_pieces data;
2370 if (! MOVE_BY_PIECES_P (len, align))
2372 to = protect_from_queue (to, 1);
2373 data.constfun = constfun;
2374 data.constfundata = constfundata;
2377 store_by_pieces_1 (&data, align);
2380 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2381 rtx with BLKmode). The caller must pass TO through protect_from_queue
2382 before calling. ALIGN is maximum alignment we can assume. */
2385 clear_by_pieces (to, len, align)
2387 unsigned HOST_WIDE_INT len;
2390 struct store_by_pieces data;
2392 data.constfun = clear_by_pieces_1;
2393 data.constfundata = NULL;
2396 store_by_pieces_1 (&data, align);
2399 /* Callback routine for clear_by_pieces.
2400 Return const0_rtx unconditionally. */
2403 clear_by_pieces_1 (data, offset, mode)
2404 PTR data ATTRIBUTE_UNUSED;
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2406 enum machine_mode mode ATTRIBUTE_UNUSED;
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). The caller must pass TO through protect_from_queue
2414 before calling. ALIGN is maximum alignment we can assume. */
2417 store_by_pieces_1 (data, align)
2418 struct store_by_pieces *data;
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2423 enum machine_mode mode = VOIDmode, tmode;
2424 enum insn_code icode;
2427 data->to_addr = to_addr;
2429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2432 data->explicit_inc_to = 0;
2434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 data->offset = data->len;
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align) > 2)
2444 /* Determine the main mode we'll be using. */
2445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2447 if (GET_MODE_SIZE (tmode) < max_size)
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2465 if ( !data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_addr_to_reg (to_addr);
2469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2471 align = MOVE_MAX * BITS_PER_UNIT;
2473 /* First store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2476 while (max_size > 1)
2478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2480 if (GET_MODE_SIZE (tmode) < max_size)
2483 if (mode == VOIDmode)
2486 icode = mov_optab->handlers[(int) mode].insn_code;
2487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2488 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2490 max_size = GET_MODE_SIZE (mode);
2493 /* The code above should have handled everything. */
2498 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2499 with move instructions for mode MODE. GENFUN is the gen_... function
2500 to make a move insn for that mode. DATA has all the other info. */
2503 store_by_pieces_2 (genfun, mode, data)
2504 rtx (*genfun) PARAMS ((rtx, ...));
2505 enum machine_mode mode;
2506 struct store_by_pieces *data;
2508 unsigned int size = GET_MODE_SIZE (mode);
2511 while (data->len >= size)
2514 data->offset -= size;
2516 if (data->autinc_to)
2518 to1 = gen_rtx_MEM (mode, data->to_addr);
2519 MEM_COPY_ATTRIBUTES (to1, data->to);
2522 to1 = change_address (data->to, mode,
2523 plus_constant (data->to_addr, data->offset));
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2535 if (! data->reverse)
2536 data->offset += size;
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2545 If we call a function that returns the length of the block, return it. */
2548 clear_storage (object, size, align)
2553 #ifdef TARGET_MEM_FUNCTIONS
2555 tree call_expr, arg_list;
2559 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2560 just move a zero. Otherwise, do this a piece at a time. */
2561 if (GET_MODE (object) != BLKmode
2562 && GET_CODE (size) == CONST_INT
2563 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2564 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 object = protect_from_queue (object, 1);
2568 size = protect_from_queue (size, 0);
2570 if (GET_CODE (size) == CONST_INT
2571 && MOVE_BY_PIECES_P (INTVAL (size), align))
2572 clear_by_pieces (object, INTVAL (size), align);
2575 /* Try the most limited insn first, because there's no point
2576 including more than one in the machine description unless
2577 the more limited one has some advantage. */
2579 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2580 enum machine_mode mode;
2582 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2583 mode = GET_MODE_WIDER_MODE (mode))
2585 enum insn_code code = clrstr_optab[(int) mode];
2586 insn_operand_predicate_fn pred;
2588 if (code != CODE_FOR_nothing
2589 /* We don't need MODE to be narrower than
2590 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2591 the mode mask, as it is returned by the macro, it will
2592 definitely be less than the actual mode mask. */
2593 && ((GET_CODE (size) == CONST_INT
2594 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2595 <= (GET_MODE_MASK (mode) >> 1)))
2596 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2597 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2598 || (*pred) (object, BLKmode))
2599 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2600 || (*pred) (opalign, VOIDmode)))
2603 rtx last = get_last_insn ();
2606 op1 = convert_to_mode (mode, size, 1);
2607 pred = insn_data[(int) code].operand[1].predicate;
2608 if (pred != 0 && ! (*pred) (op1, mode))
2609 op1 = copy_to_mode_reg (mode, op1);
2611 pat = GEN_FCN ((int) code) (object, op1, opalign);
2618 delete_insns_since (last);
2622 /* OBJECT or SIZE may have been passed through protect_from_queue.
2624 It is unsafe to save the value generated by protect_from_queue
2625 and reuse it later. Consider what happens if emit_queue is
2626 called before the return value from protect_from_queue is used.
2628 Expansion of the CALL_EXPR below will call emit_queue before
2629 we are finished emitting RTL for argument setup. So if we are
2630 not careful we could get the wrong value for an argument.
2632 To avoid this problem we go ahead and emit code to copy OBJECT
2633 and SIZE into new pseudos. We can then place those new pseudos
2634 into an RTL_EXPR and use them later, even after a call to
2637 Note this is not strictly needed for library calls since they
2638 do not call emit_queue before loading their arguments. However,
2639 we may need to have library calls call emit_queue in the future
2640 since failing to do so could cause problems for targets which
2641 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2642 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2647 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2648 TREE_UNSIGNED (integer_type_node));
2649 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2652 #ifdef TARGET_MEM_FUNCTIONS
2653 /* It is incorrect to use the libcall calling conventions to call
2654 memset in this context.
2656 This could be a user call to memset and the user may wish to
2657 examine the return value from memset.
2659 For targets where libcalls and normal calls have different
2660 conventions for returning pointers, we could end up generating
2663 So instead of using a libcall sequence we build up a suitable
2664 CALL_EXPR and expand the call in the normal fashion. */
2665 if (fn == NULL_TREE)
2669 /* This was copied from except.c, I don't know if all this is
2670 necessary in this context or not. */
2671 fn = get_identifier ("memset");
2672 fntype = build_pointer_type (void_type_node);
2673 fntype = build_function_type (fntype, NULL_TREE);
2674 fn = build_decl (FUNCTION_DECL, fn, fntype);
2675 ggc_add_tree_root (&fn, 1);
2676 DECL_EXTERNAL (fn) = 1;
2677 TREE_PUBLIC (fn) = 1;
2678 DECL_ARTIFICIAL (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
2685 memset has three arguments, the first is a void * addresses, the
2686 second a integer with the initialization value, the last is a
2687 size_t byte count for the copy. */
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
/* X and Y must agree in mode unless Y is a mode-less constant.  */
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
/* A constant the target cannot use directly is spilled to the
   constant pool and loaded from memory instead.  */
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y = force_const_mem (mode, y);
2748 /* If X or Y are memory references, verify that their addresses are valid
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = change_address (x, VOIDmode, XEXP (x, 0));
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = change_address (y, VOIDmode, XEXP (y, 0));
2763 if (mode == BLKmode)
2766 last_insn = emit_move_insn_1 (x, y);
/* If Y_CST was set (presumably to the original constant form of Y --
   the assignment is not visible in this excerpt; confirm against the
   full source), record it on the insn as a REG_EQUAL note.  */
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
/* Guard against a corrupted mode value before indexing mode tables.  */
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
/* Case 1: the target has a move pattern for MODE; just use it.  */
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller machine can
2810 push exactly, we need to use move instructions. */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
/* Compute byte offsets of the real and imaginary parts within the
   space just allocated, relative to the new stack pointer.  */
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2 = GET_MODE_SIZE (submode);
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2854 /* If this is a stack, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
/* A scalar mode wide enough to hold the whole complex value.  */
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
/* Bounce the value through a stack temporary, viewed once in
   REG_MODE (for the packed hard reg) and once in MODE.  */
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = change_address (mem, mode, NULL_RTX);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
/* General case: move the real and imaginary halves separately.  */
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x))
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2973 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2974 x = change_address (x, VOIDmode, stack_pointer_rtx);
2978 /* If we are in reload, see if either operand is a MEM whose address
2979 is scheduled for replacement. */
2980 if (reload_in_progress && GET_CODE (x) == MEM
2981 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2983 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2985 MEM_COPY_ATTRIBUTES (new, x);
2988 if (reload_in_progress && GET_CODE (y) == MEM
2989 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2991 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2993 MEM_COPY_ATTRIBUTES (new, y);
/* Copy the value one word at a time.  */
3001 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3004 rtx xpart = operand_subword (x, i, 1, mode);
3005 rtx ypart = operand_subword (y, i, 1, mode);
3007 /* If we can't get a part of Y, put Y into memory if it is a
3008 constant. Otherwise, force it into a register. If we still
3009 can't get a part of Y, abort. */
3010 if (ypart == 0 && CONSTANT_P (y))
3012 y = force_const_mem (mode, y);
3013 ypart = operand_subword (y, i, 1, mode);
3015 else if (ypart == 0)
3016 ypart = operand_subword_force (y, i, mode);
3018 if (xpart == 0 || ypart == 0)
3021 need_clobber |= (GET_CODE (xpart) == SUBREG);
3023 last_insn = emit_move_insn (xpart, ypart);
3026 seq = gen_sequence ();
3029 /* Show the output dies here. This is necessary for SUBREGs
3030 of pseudos since we cannot track their lifetimes correctly;
3031 hard regs shouldn't appear here except as return values.
3032 We never want to emit such a clobber after reload. */
3034 && ! (reload_in_progress || reload_completed)
3035 && need_clobber != 0)
3037 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3048 /* Pushing data onto the stack. */
3050 /* Push a block of length SIZE (perhaps variable)
3051 and return an rtx to address the beginning of the block.
3052 Note that it is not possible for the value returned to be a QUEUED.
3053 The value may be virtual_outgoing_args_rtx.
3055 EXTRA is the number of bytes of padding to push in addition to SIZE.
3056 BELOW nonzero means this padding comes at low addresses;
3057 otherwise, the padding comes at high addresses. */
3060 push_block (size, extra, below)
/* SIZE may arrive in ptr_mode; do the stack arithmetic in Pmode.  */
3066 size = convert_modes (Pmode, ptr_mode, size, 1);
3067 if (CONSTANT_P (size))
3068 anti_adjust_stack (plus_constant (size, extra));
3069 else if (GET_CODE (size) == REG && extra == 0)
3070 anti_adjust_stack (size);
/* Variable size plus padding: compute SIZE + EXTRA into a register
   first, then adjust the stack by that amount.  */
3073 temp = copy_to_mode_reg (Pmode, size);
3075 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3076 temp, 0, OPTAB_LIB_WIDEN);
3077 anti_adjust_stack (temp);
3080 #ifndef STACK_GROWS_DOWNWARD
3081 #ifdef ARGS_GROW_DOWNWARD
3082 if (!ACCUMULATE_OUTGOING_ARGS)
3090 /* Return the lowest stack address when STACK or ARGS grow downward and
3091 we are not accumulating outgoing arguments (the c4x port uses such
3093 temp = virtual_outgoing_args_rtx;
3094 if (extra != 0 && below)
3095 temp = plus_constant (temp, extra);
/* Downward-growing case: the block starts SIZE (plus any high-address
   padding) below the outgoing-args pointer.  */
3099 if (GET_CODE (size) == CONST_INT
3100 temp = plus_constant (virtual_outgoing_args_rtx,
3101 -INTVAL (size) - (below ? 0 : extra));
3102 else if (extra != 0 && !below)
3103 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3104 negate_rtx (Pmode, plus_constant (size, extra)));
3106 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3107 negate_rtx (Pmode, size));
3110 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3114 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
3115 block of SIZE bytes. */
3118 get_push_address (size)
/* With a post-update push code, the stack pointer has already moved
   past the block, so compensate by SIZE; otherwise the stack pointer
   itself addresses the block's start.  */
3123 if (STACK_PUSH_CODE == POST_DEC)
3124 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3125 else if (STACK_PUSH_CODE == POST_INC)
3126 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3128 temp = stack_pointer_rtx;
/* Snapshot the address in a register, presumably so later stack
   adjustments do not invalidate it -- confirm against callers.  */
3130 return copy_to_reg (temp);
3133 /* Emit single push insn. */
3135 emit_single_push_insn (mode, x, type)
3137 enum machine_mode mode;
3140 #ifdef PUSH_ROUNDING
/* Number of bytes the push actually allocates on the stack.  */
3142 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If no rounding is needed, a plain pre-inc/pre-dec push address
   works; otherwise express the full adjustment as a PRE_MODIFY of
   the stack pointer.  */
3145 if (GET_MODE_SIZE (mode) == rounded_size)
3146 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3149 #ifdef STACK_GROWS_DOWNWARD
3150 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3151 GEN_INT (-rounded_size));
3153 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3154 GEN_INT (rounded_size));
3156 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3159 dest = gen_rtx_MEM (mode, dest_addr);
/* Keep the running total of stack adjustment up to date.  */
3161 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3165 set_mem_attributes (dest, type, 1);
3166 /* Function incoming arguments may overlap with sibling call
3167 outgoing arguments and we cannot allow reordering of reads
3168 from function arguments with stores to outgoing arguments
3169 of sibling calls. */
3170 MEM_ALIAS_SET (dest) = 0;
3172 emit_move_insn (dest, x);
3178 /* Generate code to push X onto the stack, assuming it has mode MODE and
3180 MODE is redundant except when X is a CONST_INT (since they don't
3182 SIZE is an rtx for the size of data to be copied (in bytes),
3183 needed only if X is BLKmode.
3185 ALIGN (in bits) is maximum alignment we can assume.
3187 If PARTIAL and REG are both nonzero, then copy that many of the first
3188 words of X into registers starting with REG, and push the rest of X.
3189 The amount of space pushed is decreased by PARTIAL words,
3190 rounded *down* to a multiple of PARM_BOUNDARY.
3191 REG must be a hard register in this case.
3192 If REG is zero but PARTIAL is not, take all other actions for an
3193 argument partially in registers, but do not actually load any
3196 EXTRA is the amount in bytes of extra space to leave next to this arg.
3197 This is ignored if an argument block has already been allocated.
3199 On a machine that lacks real push insns, ARGS_ADDR is the address of
3200 the bottom of the argument block for this call. We use indexing off there
3201 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3202 argument block has not been preallocated.
3204 ARGS_SO_FAR is the size of args previously pushed for this call.
3206 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3207 for arguments passed in registers. If nonzero, it will be the number
3208 of bytes required. */
3211 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3212 args_addr, args_so_far, reg_parm_stack_space,
3215 enum machine_mode mode;
3224 int reg_parm_stack_space;
3228 enum direction stack_direction
3229 #ifdef STACK_GROWS_DOWNWARD
3235 /* Decide where to pad the argument: `downward' for below,
3236 `upward' for above, or `none' for don't pad it.
3237 Default is below for small data on big-endian machines; else above. */
3238 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3240 /* Invert direction if stack is post-update. */
3241 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3242 if (where_pad != none)
3243 where_pad = (where_pad == downward ? upward : downward);
3245 xinner = x = protect_from_queue (x, 0);
/* BLKmode: push a block of memory, entirely or partially.  */
3247 if (mode == BLKmode)
3249 /* Copy a block into the stack, entirely or partially. */
3252 int used = partial * UNITS_PER_WORD;
3253 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3261 /* USED is now the # of bytes we need not copy to the stack
3262 because registers will take care of them. */
3265 xinner = change_address (xinner, BLKmode,
3266 plus_constant (XEXP (xinner, 0), used));
3268 /* If the partial register-part of the arg counts in its stack size,
3269 skip the part of stack space corresponding to the registers.
3270 Otherwise, start copying to the beginning of the stack space,
3271 by setting SKIP to 0. */
3272 skip = (reg_parm_stack_space == 0) ? 0 : used;
3274 #ifdef PUSH_ROUNDING
3275 /* Do it with several push insns if that doesn't take lots of insns
3276 and if there is no difficulty with push insns that skip bytes
3277 on the stack for alignment purposes. */
3280 && GET_CODE (size) == CONST_INT
3282 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3283 /* Here we avoid the case of a structure whose weak alignment
3284 forces many pushes of a small amount of data,
3285 and such small pushes do rounding that causes trouble. */
3286 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3287 || align >= BIGGEST_ALIGNMENT
3288 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3289 == (align / BITS_PER_UNIT)))
3290 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3292 /* Push padding now if padding above and stack grows down,
3293 or if padding below and stack grows up.
3294 But if space already allocated, this has already been done. */
3295 if (extra && args_addr == 0
3296 && where_pad != none && where_pad != stack_direction)
3297 anti_adjust_stack (GEN_INT (extra));
3299 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
/* -fcheck-memory-usage instrumentation: record the pushed bytes
   with the memory checker.  */
3301 if (current_function_check_memory_usage && ! in_check_memory_usage)
3305 in_check_memory_usage = 1;
3306 temp = get_push_address (INTVAL (size) - used);
3307 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3308 emit_library_call (chkr_copy_bitmap_libfunc,
3309 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3310 Pmode, XEXP (xinner, 0), Pmode,
3311 GEN_INT (INTVAL (size) - used),
3312 TYPE_MODE (sizetype));
3314 emit_library_call (chkr_set_right_libfunc,
3315 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3316 Pmode, GEN_INT (INTVAL (size) - used),
3317 TYPE_MODE (sizetype),
3318 GEN_INT (MEMORY_USE_RW),
3319 TYPE_MODE (integer_type_node));
3320 in_check_memory_usage = 0;
3324 #endif /* PUSH_ROUNDING */
3328 /* Otherwise make space on the stack and copy the data
3329 to the address of that space. */
3331 /* Deduct words put into registers from the size we must copy. */
3334 if (GET_CODE (size) == CONST_INT)
3335 size = GEN_INT (INTVAL (size) - used);
3337 size = expand_binop (GET_MODE (size), sub_optab, size,
3338 GEN_INT (used), NULL_RTX, 0,
3342 /* Get the address of the stack space.
3343 In this case, we do not deal with EXTRA separately.
3344 A single stack adjust will do. */
3347 temp = push_block (size, extra, where_pad == downward);
3350 else if (GET_CODE (args_so_far) == CONST_INT)
3351 temp = memory_address (BLKmode,
3352 plus_constant (args_addr,
3353 skip + INTVAL (args_so_far)));
3355 temp = memory_address (BLKmode,
3356 plus_constant (gen_rtx_PLUS (Pmode,
3360 if (current_function_check_memory_usage && ! in_check_memory_usage)
3362 in_check_memory_usage = 1;
3363 target = copy_to_reg (temp);
3364 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3365 emit_library_call (chkr_copy_bitmap_libfunc,
3366 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3368 XEXP (xinner, 0), Pmode,
3369 size, TYPE_MODE (sizetype));
3371 emit_library_call (chkr_set_right_libfunc,
3372 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3374 size, TYPE_MODE (sizetype),
3375 GEN_INT (MEMORY_USE_RW),
3376 TYPE_MODE (integer_type_node));
3377 in_check_memory_usage = 0;
3380 target = gen_rtx_MEM (BLKmode, temp);
3384 set_mem_attributes (target, type, 1);
3385 /* Function incoming arguments may overlap with sibling call
3386 outgoing arguments and we cannot allow reordering of reads
3387 from function arguments with stores to outgoing arguments
3388 of sibling calls. */
3389 MEM_ALIAS_SET (target) = 0;
3392 /* TEMP is the address of the block. Copy the data there. */
3393 if (GET_CODE (size) == CONST_INT
3394 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3396 move_by_pieces (target, xinner, INTVAL (size), align);
3401 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3402 enum machine_mode mode;
/* Look for a movstr pattern, narrowest integer mode first, whose
   operand predicates accept our operands.  */
3404 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3406 mode = GET_MODE_WIDER_MODE (mode))
3408 enum insn_code code = movstr_optab[(int) mode];
3409 insn_operand_predicate_fn pred;
3411 if (code != CODE_FOR_nothing
3412 && ((GET_CODE (size) == CONST_INT
3413 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3414 <= (GET_MODE_MASK (mode) >> 1)))
3415 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3416 && (!(pred = insn_data[(int) code].operand[0].predicate)
3417 || ((*pred) (target, BLKmode)))
3418 && (!(pred = insn_data[(int) code].operand[1].predicate)
3419 || ((*pred) (xinner, BLKmode)))
3420 && (!(pred = insn_data[(int) code].operand[3].predicate)
3421 || ((*pred) (opalign, VOIDmode))))
3423 rtx op2 = convert_to_mode (mode, size, 1);
3424 rtx last = get_last_insn ();
3427 pred = insn_data[(int) code].operand[2].predicate;
3428 if (pred != 0 && ! (*pred) (op2, mode))
3429 op2 = copy_to_mode_reg (mode, op2);
3431 pat = GEN_FCN ((int) code) (target, xinner,
/* Pattern failed to expand; discard its insns and try wider mode.  */
3439 delete_insns_since (last);
3444 if (!ACCUMULATE_OUTGOING_ARGS)
3446 /* If the source is referenced relative to the stack pointer,
3447 copy it to another register to stabilize it. We do not need
3448 to do this if we know that we won't be changing sp. */
3450 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3451 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3452 temp = copy_to_reg (temp);
3455 /* Make inhibit_defer_pop nonzero around the library call
3456 to force it to pop the bcopy-arguments right away. */
3458 #ifdef TARGET_MEM_FUNCTIONS
3459 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3460 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3461 convert_to_mode (TYPE_MODE (sizetype),
3462 size, TREE_UNSIGNED (sizetype)),
3463 TYPE_MODE (sizetype));
3465 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3466 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3467 convert_to_mode (TYPE_MODE (integer_type_node),
3469 TREE_UNSIGNED (integer_type_node)),
3470 TYPE_MODE (integer_type_node));
3475 else if (partial > 0)
3477 /* Scalar partly in registers. */
3479 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3482 /* # words of start of argument
3483 that we must make space for but need not store. */
3484 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3485 int args_offset = INTVAL (args_so_far);
3488 /* Push padding now if padding above and stack grows down,
3489 or if padding below and stack grows up.
3490 But if space already allocated, this has already been done. */
3491 if (extra && args_addr == 0
3492 && where_pad != none && where_pad != stack_direction)
3493 anti_adjust_stack (GEN_INT (extra));
3495 /* If we make space by pushing it, we might as well push
3496 the real data. Otherwise, we can leave OFFSET nonzero
3497 and leave the space uninitialized. */
3501 /* Now NOT_STACK gets the number of words that we don't need to
3502 allocate on the stack. */
3503 not_stack = partial - offset;
3505 /* If the partial register-part of the arg counts in its stack size,
3506 skip the part of stack space corresponding to the registers.
3507 Otherwise, start copying to the beginning of the stack space,
3508 by setting SKIP to 0. */
3509 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3511 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3512 x = validize_mem (force_const_mem (mode, x));
3514 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3515 SUBREGs of such registers are not allowed. */
3516 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3517 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3518 x = copy_to_reg (x);
3520 /* Loop over all the words allocated on the stack for this arg. */
3521 /* We can do it by words, because any scalar bigger than a word
3522 has a size a multiple of a word. */
3523 #ifndef PUSH_ARGS_REVERSED
3524 for (i = not_stack; i < size; i++)
3526 for (i = size - 1; i >= not_stack; i--)
3528 if (i >= not_stack + offset)
3529 emit_push_insn (operand_subword_force (x, i, mode),
3530 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3532 GEN_INT (args_offset + ((i - not_stack + skip)
3534 reg_parm_stack_space, alignment_pad);
/* Ordinary scalar, pushed whole.  */
3539 rtx target = NULL_RTX;
3542 /* Push padding now if padding above and stack grows down,
3543 or if padding below and stack grows up.
3544 But if space already allocated, this has already been done. */
3545 if (extra && args_addr == 0
3546 && where_pad != none && where_pad != stack_direction)
3547 anti_adjust_stack (GEN_INT (extra));
3549 #ifdef PUSH_ROUNDING
/* Use a real push insn when there is no preallocated argument block.  */
3550 if (args_addr == 0 && PUSH_ARGS)
3551 emit_single_push_insn (mode, x, type);
3555 if (GET_CODE (args_so_far) == CONST_INT)
3557 = memory_address (mode,
3558 plus_constant (args_addr,
3559 INTVAL (args_so_far)));
3561 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3564 dest = gen_rtx_MEM (mode, addr);
3567 set_mem_attributes (dest, type, 1);
3568 /* Function incoming arguments may overlap with sibling call
3569 outgoing arguments and we cannot allow reordering of reads
3570 from function arguments with stores to outgoing arguments
3571 of sibling calls. */
3572 MEM_ALIAS_SET (dest) = 0;
3575 emit_move_insn (dest, x);
3579 if (current_function_check_memory_usage && ! in_check_memory_usage)
3581 in_check_memory_usage = 1;
3583 target = get_push_address (GET_MODE_SIZE (mode));
3585 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3586 emit_library_call (chkr_copy_bitmap_libfunc,
3587 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3588 Pmode, XEXP (x, 0), Pmode,
3589 GEN_INT (GET_MODE_SIZE (mode)),
3590 TYPE_MODE (sizetype));
3592 emit_library_call (chkr_set_right_libfunc,
3593 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3594 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3595 TYPE_MODE (sizetype),
3596 GEN_INT (MEMORY_USE_RW),
3597 TYPE_MODE (integer_type_node));
3598 in_check_memory_usage = 0;
3603 /* If part should go in registers, copy that part
3604 into the appropriate registers. Do this now, at the end,
3605 since mem-to-mem copies above may do function calls. */
3606 if (partial > 0 && reg != 0)
3608 /* Handle calls that pass values in multiple non-contiguous locations.
3609 The Irix 6 ABI has examples of this. */
3610 if (GET_CODE (reg) == PARALLEL)
3611 emit_group_load (reg, x, -1, align); /* ??? size? */
3613 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding on the same side as stack growth is pushed last.  */
3616 if (extra && args_addr == 0 && where_pad == stack_direction)
3617 anti_adjust_stack (GEN_INT (extra));
3619 if (alignment_pad && args_addr == 0)
3620 anti_adjust_stack (alignment_pad);
3623 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3631 /* Only registers can be subtargets. */
3632 || GET_CODE (x) != REG
3633 /* If the register is readonly, it can't be set more than once. */
3634 || RTX_UNCHANGING_P (x)
3635 /* Don't use hard regs to avoid extending their life. */
3636 || REGNO (x) < FIRST_PSEUDO_REGISTER
3637 /* Avoid subtargets inside loops,
3638 since they hide some invariant expressions. */
3639 || preserve_subexpressions_p ())
3643 /* Expand an assignment that stores the value of FROM into TO.
3644 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3645 (This may contain a QUEUED rtx;
3646 if the value is constant, this rtx is a constant.)
3647 Otherwise, the returned value is NULL_RTX.
3649 SUGGEST_REG is no longer actually used.
3650 It used to mean, copy the value through a register
3651 and return that register, if that is possible.
3652 We now use WANT_VALUE to decide whether to do this. */
/* NOTE(review): this excerpt is sampled -- intermediate source lines are
   missing throughout the body below, so several statements appear without
   their enclosing braces/else arms.  Comments added here describe only what
   the visible lines establish.  */
3655 expand_assignment (to, from, want_value, suggest_reg)
3658 int suggest_reg ATTRIBUTE_UNUSED;
3660 register rtx to_rtx = 0;
3663 /* Don't crash if the lhs of the assignment was erroneous. */
/* Error recovery: still expand FROM for its side effects, then bail.  */
3665 if (TREE_CODE (to) == ERROR_MARK)
3667 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3668 return want_value ? result : NULL_RTX;
3671 /* Assignment of a structure component needs special treatment
3672 if the structure component's rtx is not simply a MEM.
3673 Assignment of an array element at a constant index, and assignment of
3674 an array element in an unaligned packed structure field, has the same
3677 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3678 || TREE_CODE (to) == ARRAY_REF)
3680 enum machine_mode mode1;
3681 HOST_WIDE_INT bitsize, bitpos;
3686 unsigned int alignment;
/* Decompose the reference into containing object (tem) plus a
   bit-position/size/mode description of the accessed field.  */
3689 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3690 &unsignedp, &volatilep, &alignment);
3692 /* If we are going to use store_bit_field and extract_bit_field,
3693 make sure to_rtx will be safe for multiple use. */
3695 if (mode1 == VOIDmode && want_value)
3696 tem = stabilize_reference (tem);
3698 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT)
3701 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3703 if (GET_CODE (to_rtx) != MEM)
/* Normalize a variable offset to ptr_mode before address arithmetic.  */
3706 if (GET_MODE (offset_rtx) != ptr_mode)
3708 #ifdef POINTERS_EXTEND_UNSIGNED
3709 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3711 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3715 /* A constant address in TO_RTX can have VOIDmode, we must not try
3716 to call force_reg for that case. Avoid that case. */
3717 if (GET_CODE (to_rtx) == MEM
3718 && GET_MODE (to_rtx) == BLKmode
3719 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3721 && (bitpos % bitsize) == 0
3722 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3723 && alignment == GET_MODE_ALIGNMENT (mode1))
/* Aligned case: fold the constant bit position into the address and
   retarget the MEM at the field's own mode.  */
3725 rtx temp = change_address (to_rtx, mode1,
3726 plus_constant (XEXP (to_rtx, 0),
3729 if (GET_CODE (XEXP (temp, 0)) == REG)
3732 to_rtx = change_address (to_rtx, mode1,
3733 force_reg (GET_MODE (XEXP (temp, 0)),
/* Unaligned/variable case: build an explicit PLUS of base and offset,
   forcing the offset into a register.  */
3738 to_rtx = change_address (to_rtx, VOIDmode,
3739 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3740 force_reg (ptr_mode,
3746 if (GET_CODE (to_rtx) == MEM)
3748 /* When the offset is zero, to_rtx is the address of the
3749 structure we are storing into, and hence may be shared.
3750 We must make a new MEM before setting the volatile bit. */
3752 to_rtx = copy_rtx (to_rtx);
3754 MEM_VOLATILE_P (to_rtx) = 1;
3756 #if 0 /* This was turned off because, when a field is volatile
3757 in an object which is not volatile, the object may be in a register,
3758 and then we would abort over here. */
3764 if (TREE_CODE (to) == COMPONENT_REF
3765 && TREE_READONLY (TREE_OPERAND (to, 1)))
3768 to_rtx = copy_rtx (to_rtx);
3770 RTX_UNCHANGING_P (to_rtx) = 1;
3773 /* Check the access. */
/* -fcheck-memory-usage instrumentation: compute the byte span covered by
   the bitfield store and emit a write-access check on it.  */
3774 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3779 enum machine_mode best_mode;
3781 best_mode = get_best_mode (bitsize, bitpos,
3782 TYPE_ALIGN (TREE_TYPE (tem)),
3784 if (best_mode == VOIDmode)
3787 best_mode_size = GET_MODE_BITSIZE (best_mode);
3788 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3789 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3790 size *= GET_MODE_SIZE (best_mode);
3792 /* Check the access right of the pointer. */
3793 in_check_memory_usage = 1;
3795 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3796 VOIDmode, 3, to_addr, Pmode,
3797 GEN_INT (size), TYPE_MODE (sizetype),
3798 GEN_INT (MEMORY_USE_WO),
3799 TYPE_MODE (integer_type_node));
3800 in_check_memory_usage = 0;
3803 /* If this is a varying-length object, we must get the address of
3804 the source and do an explicit block move. */
3807 unsigned int from_align;
3808 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3810 = change_address (to_rtx, VOIDmode,
3811 plus_constant (XEXP (to_rtx, 0),
3812 bitpos / BITS_PER_UNIT));
3814 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3815 MIN (alignment, from_align));
/* Fixed-size case: delegate the actual bitfield store to store_field.  */
3822 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3824 /* Spurious cast for HPUX compiler. */
3825 ? ((enum machine_mode)
3826 TYPE_MODE (TREE_TYPE (to)))
3830 int_size_in_bytes (TREE_TYPE (tem)),
3831 get_alias_set (to));
3833 preserve_temp_slots (result);
3837 /* If the value is meaningful, convert RESULT to the proper mode.
3838 Otherwise, return nothing. */
3839 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3840 TYPE_MODE (TREE_TYPE (from)),
3842 TREE_UNSIGNED (TREE_TYPE (to)))
3847 /* If the rhs is a function call and its value is not an aggregate,
3848 call the function before we start to compute the lhs.
3849 This is needed for correct code for cases such as
3850 val = setjmp (buf) on machines where reference to val
3851 requires loading up part of an address in a separate insn.
3853 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3854 since it might be a promoted variable where the zero- or sign- extension
3855 needs to be done. Handling this in the normal way is safe because no
3856 computation is done before the call. */
3857 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3859 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3860 && GET_CODE (DECL_RTL (to)) == REG))
/* Evaluate the call FIRST, then expand the destination.  */
3865 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3867 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3869 /* Handle calls that return values in multiple non-contiguous locations.
3870 The Irix 6 ABI has examples of this. */
3871 if (GET_CODE (to_rtx) == PARALLEL)
3872 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3873 TYPE_ALIGN (TREE_TYPE (from)));
3874 else if (GET_MODE (to_rtx) == BLKmode)
3875 emit_block_move (to_rtx, value, expr_size (from),
3876 TYPE_ALIGN (TREE_TYPE (from)));
3879 #ifdef POINTERS_EXTEND_UNSIGNED
3880 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3881 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3882 value = convert_memory_address (GET_MODE (to_rtx), value);
3884 emit_move_insn (to_rtx, value);
3886 preserve_temp_slots (to_rtx);
3889 return want_value ? to_rtx : NULL_RTX;
3892 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3893 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3897 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3898 if (GET_CODE (to_rtx) == MEM)
3899 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3902 /* Don't move directly into a return register. */
/* Return values go through a temporary so partial stores can't clobber
   the (possibly multi-part) return location.  */
3903 if (TREE_CODE (to) == RESULT_DECL
3904 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3909 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3911 if (GET_CODE (to_rtx) == PARALLEL)
3912 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3913 TYPE_ALIGN (TREE_TYPE (from)));
3915 emit_move_insn (to_rtx, temp);
3917 preserve_temp_slots (to_rtx);
3920 return want_value ? to_rtx : NULL_RTX;
3923 /* In case we are returning the contents of an object which overlaps
3924 the place the value is being stored, use a safe function when copying
3925 a value through a pointer into a structure value return block. */
3926 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3927 && current_function_returns_struct
3928 && !current_function_returns_pcc_struct)
3933 size = expr_size (from);
3934 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3935 EXPAND_MEMORY_USE_DONT);
3937 /* Copy the rights of the bitmap. */
3938 if (current_function_check_memory_usage)
3939 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3940 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3941 XEXP (from_rtx, 0), Pmode,
3942 convert_to_mode (TYPE_MODE (sizetype),
3943 size, TREE_UNSIGNED (sizetype)),
3944 TYPE_MODE (sizetype));
/* Use memmove/bcopy (overlap-safe) rather than memcpy, since source and
   destination may overlap here.  */
3946 #ifdef TARGET_MEM_FUNCTIONS
3947 emit_library_call (memmove_libfunc, LCT_NORMAL,
3948 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3949 XEXP (from_rtx, 0), Pmode,
3950 convert_to_mode (TYPE_MODE (sizetype),
3951 size, TREE_UNSIGNED (sizetype)),
3952 TYPE_MODE (sizetype));
3954 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3956 XEXP (to_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (integer_type_node),
3958 size, TREE_UNSIGNED (integer_type_node)),
3959 TYPE_MODE (integer_type_node));
3962 preserve_temp_slots (to_rtx);
3965 return want_value ? to_rtx : NULL_RTX;
3968 /* Compute FROM and store the value in the rtx we got. */
/* Common path: everything else funnels through store_expr.  */
3971 result = store_expr (from, to_rtx, want_value);
3972 preserve_temp_slots (result);
3975 return want_value ? result : NULL_RTX;
3978 /* Generate code for computing expression EXP,
3979 and storing the value into TARGET.
3980 TARGET may contain a QUEUED rtx.
3982 If WANT_VALUE is nonzero, return a copy of the value
3983 not in TARGET, so that we can be sure to use the proper
3984 value in a containing expression even if TARGET has something
3985 else stored in it. If possible, we copy the value through a pseudo
3986 and return that pseudo. Or, if the value is constant, we try to
3987 return the constant. In some cases, we return a pseudo
3988 copied *from* TARGET.
3990 If the mode is BLKmode then we may return TARGET itself.
3991 It turns out that in BLKmode it doesn't cause a problem.
3992 because C has no operators that could combine two different
3993 assignments into the same BLKmode object with different values
3994 with no sequence point. Will other languages need this to
3997 If WANT_VALUE is 0, we return NULL, to make sure
3998 to catch quickly any cases where the caller uses the value
3999 and fails to set WANT_VALUE. */
/* NOTE(review): sampled excerpt -- some intermediate source lines are
   missing below, so a few statements appear without their enclosing
   braces or else arms.  */
4002 store_expr (exp, target, want_value)
4004 register rtx target;
4008 int dont_return_target = 0;
4009 int dont_store_target = 0;
4011 if (TREE_CODE (exp) == COMPOUND_EXPR)
4013 /* Perform first part of compound expression, then assign from second
4015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4017 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4019 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4021 /* For conditional expression, get safe form of the target. Then
4022 test the condition, doing the appropriate assignment on either
4023 side. This avoids the creation of unnecessary temporaries.
4024 For non-BLKmode, it is more efficient not to do this. */
4026 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4029 target = protect_from_queue (target, 1);
4031 do_pending_stack_adjust ();
4033 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4034 start_cleanup_deferral ();
4035 store_expr (TREE_OPERAND (exp, 1), target, 0);
4036 end_cleanup_deferral ();
4038 emit_jump_insn (gen_jump (lab2));
4041 start_cleanup_deferral ();
4042 store_expr (TREE_OPERAND (exp, 2), target, 0);
4043 end_cleanup_deferral ();
4048 return want_value ? target : NULL_RTX;
4050 else if (queued_subexp_p (target))
4051 /* If target contains a postincrement, let's not risk
4052 using it as the place to generate the rhs. */
4054 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4056 /* Expand EXP into a new pseudo. */
4057 temp = gen_reg_rtx (GET_MODE (target));
4058 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4061 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4063 /* If target is volatile, ANSI requires accessing the value
4064 *from* the target, if it is accessed. So make that happen.
4065 In no case return the target itself. */
4066 if (! MEM_VOLATILE_P (target) && want_value)
4067 dont_return_target = 1;
4069 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4070 && GET_MODE (target) != BLKmode)
4071 /* If target is in memory and caller wants value in a register instead,
4072 arrange that. Pass TARGET as target for expand_expr so that,
4073 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4074 We know expand_expr will not use the target in that case.
4075 Don't do this if TARGET is volatile because we are supposed
4076 to write it and then read it. */
4078 temp = expand_expr (exp, target, GET_MODE (target), 0);
4079 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4081 /* If TEMP is already in the desired TARGET, only copy it from
4082 memory and don't store it there again. */
4084 || (rtx_equal_p (temp, target)
4085 && ! side_effects_p (temp) && ! side_effects_p (target)))
4086 dont_store_target = 1;
4087 temp = copy_to_reg (temp);
4089 dont_return_target = 1;
4091 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4092 /* If this is an scalar in a register that is stored in a wider mode
4093 than the declared mode, compute the result into its declared mode
4094 and then convert to the wider mode. Our value is the computed
4097 /* If we don't want a value, we can do the conversion inside EXP,
4098 which will often result in some optimizations. Do the conversion
4099 in two steps: first change the signedness, if needed, then
4100 the extend. But don't do this if the type of EXP is a subtype
4101 of something else since then the conversion might involve
4102 more than just converting modes. */
4103 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4104 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4106 if (TREE_UNSIGNED (TREE_TYPE (exp))
4107 != SUBREG_PROMOTED_UNSIGNED_P (target))
4110 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4114 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4115 SUBREG_PROMOTED_UNSIGNED_P (target)),
4119 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4121 /* If TEMP is a volatile MEM and we want a result value, make
4122 the access now so it gets done only once. Likewise if
4123 it contains TARGET. */
4124 if (GET_CODE (temp) == MEM && want_value
4125 && (MEM_VOLATILE_P (temp)
4126 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4127 temp = copy_to_reg (temp);
4129 /* If TEMP is a VOIDmode constant, use convert_modes to make
4130 sure that we properly convert it. */
4131 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4132 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4133 TYPE_MODE (TREE_TYPE (exp)), temp,
4134 SUBREG_PROMOTED_UNSIGNED_P (target));
/* Store into the underlying (wider) register with the promoted
   signedness.  */
4136 convert_move (SUBREG_REG (target), temp,
4137 SUBREG_PROMOTED_UNSIGNED_P (target));
4139 /* If we promoted a constant, change the mode back down to match
4140 target. Otherwise, the caller might get confused by a result whose
4141 mode is larger than expected. */
4143 if (want_value && GET_MODE (temp) != GET_MODE (target)
4144 && GET_MODE (temp) != VOIDmode)
4146 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4147 SUBREG_PROMOTED_VAR_P (temp) = 1;
4148 SUBREG_PROMOTED_UNSIGNED_P (temp)
4149 = SUBREG_PROMOTED_UNSIGNED_P (target);
4152 return want_value ? temp : NULL_RTX;
/* Default path: expand EXP with TARGET as a hint.  */
4156 temp = expand_expr (exp, target, GET_MODE (target), 0);
4157 /* Return TARGET if it's a specified hardware register.
4158 If TARGET is a volatile mem ref, either return TARGET
4159 or return a reg copied *from* TARGET; ANSI requires this.
4161 Otherwise, if TEMP is not TARGET, return TEMP
4162 if it is constant (for efficiency),
4163 or if we really want the correct value. */
4164 if (!(target && GET_CODE (target) == REG
4165 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4166 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4167 && ! rtx_equal_p (temp, target)
4168 && (CONSTANT_P (temp) || want_value))
4169 dont_return_target = 1;
4172 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4173 the same as that of TARGET, adjust the constant. This is needed, for
4174 example, in case it is a CONST_DOUBLE and we want only a word-sized
4176 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4177 && TREE_CODE (exp) != ERROR_MARK
4178 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4179 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4180 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
/* -fcheck-memory-usage: validate the write (and, for MEM sources, the
   accompanying read) before performing the store.  */
4182 if (current_function_check_memory_usage
4183 && GET_CODE (target) == MEM
4184 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4186 in_check_memory_usage = 1;
4187 if (GET_CODE (temp) == MEM)
4188 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4189 VOIDmode, 3, XEXP (target, 0), Pmode,
4190 XEXP (temp, 0), Pmode,
4191 expr_size (exp), TYPE_MODE (sizetype));
4193 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4194 VOIDmode, 3, XEXP (target, 0), Pmode,
4195 expr_size (exp), TYPE_MODE (sizetype),
4196 GEN_INT (MEMORY_USE_WO),
4197 TYPE_MODE (integer_type_node));
4198 in_check_memory_usage = 0;
4201 /* If value was not generated in the target, store it there.
4202 Convert the value to TARGET's type first if nec. */
4203 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4204 one or both of them are volatile memory refs, we have to distinguish
4206 - expand_expr has used TARGET. In this case, we must not generate
4207 another copy. This can be detected by TARGET being equal according
4209 - expand_expr has not used TARGET - that means that the source just
4210 happens to have the same RTX form. Since temp will have been created
4211 by expand_expr, it will compare unequal according to == .
4212 We must generate a copy in this case, to reach the correct number
4213 of volatile memory references. */
4215 if ((! rtx_equal_p (temp, target)
4216 || (temp != target && (side_effects_p (temp)
4217 || side_effects_p (target))))
4218 && TREE_CODE (exp) != ERROR_MARK
4219 && ! dont_store_target)
4221 target = protect_from_queue (target, 1);
4222 if (GET_MODE (temp) != GET_MODE (target)
4223 && GET_MODE (temp) != VOIDmode)
4225 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4226 if (dont_return_target)
4228 /* In this case, we will return TEMP,
4229 so make sure it has the proper mode.
4230 But don't forget to store the value into TARGET. */
4231 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4232 emit_move_insn (target, temp);
4235 convert_move (target, temp, unsignedp);
4238 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4240 /* Handle copying a string constant into an array.
4241 The string constant may be shorter than the array.
4242 So copy just the string's actual length, and clear the rest. */
4246 /* Get the size of the data type of the string,
4247 which is actually the size of the target. */
4248 size = expr_size (exp);
4249 if (GET_CODE (size) == CONST_INT
4250 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4251 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4254 /* Compute the size of the data to copy from the string. */
4256 = size_binop (MIN_EXPR,
4257 make_tree (sizetype, size),
4258 size_int (TREE_STRING_LENGTH (exp)));
4259 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4260 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4264 /* Copy that much. */
4265 emit_block_move (target, temp, copy_size_rtx,
4266 TYPE_ALIGN (TREE_TYPE (exp)));
4268 /* Figure out how much is left in TARGET that we have to clear.
4269 Do all calculations in ptr_mode. */
4271 addr = XEXP (target, 0);
4272 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4274 if (GET_CODE (copy_size_rtx) == CONST_INT)
4276 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4277 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
/* Alignment of the cleared tail is bounded by the lowest set bit of
   the copy size (bytes copied so far).  */
4279 (unsigned int) (BITS_PER_UNIT
4280 * (INTVAL (copy_size_rtx)
4281 & - INTVAL (copy_size_rtx))));
/* Variable copy size: compute remaining address/size at run time and
   guard the clear with a size > 0 check.  */
4285 addr = force_reg (ptr_mode, addr);
4286 addr = expand_binop (ptr_mode, add_optab, addr,
4287 copy_size_rtx, NULL_RTX, 0,
4290 size = expand_binop (ptr_mode, sub_optab, size,
4291 copy_size_rtx, NULL_RTX, 0,
4294 align = BITS_PER_UNIT;
4295 label = gen_label_rtx ();
4296 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4297 GET_MODE (size), 0, 0, label);
4299 align = MIN (align, expr_align (copy_size));
4301 if (size != const0_rtx)
4303 rtx dest = gen_rtx_MEM (BLKmode, addr);
4305 MEM_COPY_ATTRIBUTES (dest, target);
4307 /* Be sure we can write on ADDR. */
4308 in_check_memory_usage = 1;
4309 if (current_function_check_memory_usage)
4310 emit_library_call (chkr_check_addr_libfunc,
4311 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4313 size, TYPE_MODE (sizetype),
4314 GEN_INT (MEMORY_USE_WO),
4315 TYPE_MODE (integer_type_node));
4316 in_check_memory_usage = 0;
4317 clear_storage (dest, size, align);
4324 /* Handle calls that return values in multiple non-contiguous locations.
4325 The Irix 6 ABI has examples of this. */
4326 else if (GET_CODE (target) == PARALLEL)
4327 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4328 TYPE_ALIGN (TREE_TYPE (exp)));
4329 else if (GET_MODE (temp) == BLKmode)
4330 emit_block_move (target, temp, expr_size (exp),
4331 TYPE_ALIGN (TREE_TYPE (exp)));
4333 emit_move_insn (target, temp);
4336 /* If we don't want a value, return NULL_RTX. */
4340 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4341 ??? The latter test doesn't seem to make sense. */
4342 else if (dont_return_target && GET_CODE (temp) != MEM)
4345 /* Return TARGET itself if it is a hard register. */
4346 else if (want_value && GET_MODE (target) != BLKmode
4347 && ! (GET_CODE (target) == REG
4348 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4349 return copy_to_reg (target);
4355 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): sampled excerpt -- the function header and several case
   labels are missing; the visible switch arms cover NON_LVALUE_EXPR,
   integer constants, complex constants, real constants and
   CONSTRUCTORs.  */
4363 switch (TREE_CODE (exp))
4367 case NON_LVALUE_EXPR:
4368 return is_zeros_p (TREE_OPERAND (exp, 0));
4371 return integer_zerop (exp);
4375 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4378 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
/* A SET_TYPE constructor is all-zero iff it has no elements; otherwise
   every element must itself be zero.  */
4381 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4382 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4383 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4384 if (! is_zeros_p (TREE_VALUE (elt)))
4394 /* Return 1 if EXP contains mostly (3/4) zeros. */
4397 mostly_zeros_p (exp)
4400 if (TREE_CODE (exp) == CONSTRUCTOR)
4402 int elts = 0, zeros = 0;
4403 tree elt = CONSTRUCTOR_ELTS (exp);
4404 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4406 /* If there are no ranges of true bits, it is all zero. */
4407 return elt == NULL_TREE;
/* Count zero vs. total elements; each RANGE_EXPR index counts as one
   element here (see the comment below on accuracy).  */
4409 for (; elt; elt = TREE_CHAIN (elt))
4411 /* We do not handle the case where the index is a RANGE_EXPR,
4412 so the statistic will be somewhat inaccurate.
4413 We do make a more accurate count in store_constructor itself,
4414 so since this function is only used for nested array elements,
4415 this should be close enough. */
4416 if (mostly_zeros_p (TREE_VALUE (elt)))
/* "Mostly" means at least 3/4 of the elements are zero.  */
4421 return 4 * zeros >= 3 * elts;
4424 return is_zeros_p (exp);
4427 /* Helper function for store_constructor.
4428 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4429 TYPE is the type of the CONSTRUCTOR, not the element type.
4430 ALIGN and CLEARED are as for store_constructor.
4431 ALIAS_SET is the alias set to use for any stores.
4433 This provides a recursive shortcut back to store_constructor when it isn't
4434 necessary to go through store_field. This is so that we can pass through
4435 the cleared field to let store_constructor know that we may not have to
4436 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): sampled excerpt -- some parameter declarations and
   statements are missing between the visible lines.  */
4439 store_constructor_field (target, bitsize, bitpos,
4440 mode, exp, type, align, cleared, alias_set)
4442 unsigned HOST_WIDE_INT bitsize;
4443 HOST_WIDE_INT bitpos;
4444 enum machine_mode mode;
/* Fast path: a nested CONSTRUCTOR at a byte boundary can recurse into
   store_constructor directly, preserving CLEARED.  */
4450 if (TREE_CODE (exp) == CONSTRUCTOR
4451 && bitpos % BITS_PER_UNIT == 0
4452 /* If we have a non-zero bitpos for a register target, then we just
4453 let store_field do the bitfield handling. This is unlikely to
4454 generate unnecessary clear instructions anyways. */
4455 && (bitpos == 0 || GET_CODE (target) == MEM))
4459 = change_address (target,
4460 GET_MODE (target) == BLKmode
4462 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4463 ? BLKmode : VOIDmode,
4464 plus_constant (XEXP (target, 0),
4465 bitpos / BITS_PER_UNIT));
4468 /* Show the alignment may no longer be what it was and update the alias
4469 set, if required. */
/* (bitpos & -bitpos) isolates the lowest set bit, bounding alignment.  */
4471 align = MIN (align, (unsigned int) bitpos & - bitpos);
4472 if (GET_CODE (target) == MEM)
4473 MEM_ALIAS_SET (target) = alias_set;
4475 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
/* Slow path: general bitfield store.  */
4478 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4479 int_size_in_bytes (type), alias_set);
4482 /* Store the value of constructor EXP into the rtx TARGET.
4483 TARGET is either a REG or a MEM.
4484 ALIGN is the maximum known alignment for TARGET.
4485 CLEARED is true if TARGET is known to have been zero'd.
4486 SIZE is the number of bytes of TARGET we are allowed to modify: this
4487 may not be the same as the size of EXP if we are assigning to a field
4488 which has been packed to exclude padding bits. */
4491 store_constructor (exp, target, align, cleared, size)
4498 tree type = TREE_TYPE (exp);
4499 #ifdef WORD_REGISTER_OPERATIONS
4500 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4503 /* We know our target cannot conflict, since safe_from_p has been called. */
4505 /* Don't try copying piece by piece into a hard register
4506 since that is vulnerable to being clobbered by EXP.
4507 Instead, construct in a pseudo register and then copy it all. */
4508 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4510 rtx temp = gen_reg_rtx (GET_MODE (target));
4511 store_constructor (exp, temp, align, cleared, size);
4512 emit_move_insn (target, temp);
4517 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4518 || TREE_CODE (type) == QUAL_UNION_TYPE)
4522 /* Inform later passes that the whole union value is dead. */
4523 if ((TREE_CODE (type) == UNION_TYPE
4524 || TREE_CODE (type) == QUAL_UNION_TYPE)
4527 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4529 /* If the constructor is empty, clear the union. */
4530 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4531 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4534 /* If we are building a static constructor into a register,
4535 set the initial value as zero so we can fold the value into
4536 a constant. But if more than one register is involved,
4537 this probably loses. */
4538 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4539 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4542 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4547 /* If the constructor has fewer fields than the structure
4548 or if we are initializing the structure to mostly zeros,
4549 clear the whole structure first. Don't do this is TARGET is
4550 register whose mode size isn't equal to SIZE since clear_storage
4551 can't handle this case. */
4553 && ((list_length (CONSTRUCTOR_ELTS (exp))
4554 != fields_length (type))
4555 || mostly_zeros_p (exp))
4556 && (GET_CODE (target) != REG
4557 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4560 clear_storage (target, GEN_INT (size), align);
4565 /* Inform later passes that the old value is dead. */
4566 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4568 /* Store each element of the constructor into
4569 the corresponding field of TARGET. */
4571 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4573 register tree field = TREE_PURPOSE (elt);
4574 #ifdef WORD_REGISTER_OPERATIONS
4575 tree value = TREE_VALUE (elt);
4577 register enum machine_mode mode;
4578 HOST_WIDE_INT bitsize;
4579 HOST_WIDE_INT bitpos = 0;
4582 rtx to_rtx = target;
4584 /* Just ignore missing fields.
4585 We cleared the whole structure, above,
4586 if any fields are missing. */
4590 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4593 if (host_integerp (DECL_SIZE (field), 1))
4594 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4598 unsignedp = TREE_UNSIGNED (field);
4599 mode = DECL_MODE (field);
4600 if (DECL_BIT_FIELD (field))
4603 offset = DECL_FIELD_OFFSET (field);
4604 if (host_integerp (offset, 0)
4605 && host_integerp (bit_position (field), 0))
4607 bitpos = int_bit_position (field);
4611 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4617 if (contains_placeholder_p (offset))
4618 offset = build (WITH_RECORD_EXPR, sizetype,
4619 offset, make_tree (TREE_TYPE (exp), target));
4621 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4622 if (GET_CODE (to_rtx) != MEM)
4625 if (GET_MODE (offset_rtx) != ptr_mode)
4627 #ifdef POINTERS_EXTEND_UNSIGNED
4628 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4630 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4635 = change_address (to_rtx, VOIDmode,
4636 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4637 force_reg (ptr_mode,
4639 align = DECL_OFFSET_ALIGN (field);
4642 if (TREE_READONLY (field))
4644 if (GET_CODE (to_rtx) == MEM)
4645 to_rtx = copy_rtx (to_rtx);
4647 RTX_UNCHANGING_P (to_rtx) = 1;
4650 #ifdef WORD_REGISTER_OPERATIONS
4651 /* If this initializes a field that is smaller than a word, at the
4652 start of a word, try to widen it to a full word.
4653 This special case allows us to output C++ member function
4654 initializations in a form that the optimizers can understand. */
4655 if (GET_CODE (target) == REG
4656 && bitsize < BITS_PER_WORD
4657 && bitpos % BITS_PER_WORD == 0
4658 && GET_MODE_CLASS (mode) == MODE_INT
4659 && TREE_CODE (value) == INTEGER_CST
4661 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4663 tree type = TREE_TYPE (value);
4664 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4666 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4667 value = convert (type, value);
4669 if (BYTES_BIG_ENDIAN)
4671 = fold (build (LSHIFT_EXPR, type, value,
4672 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4673 bitsize = BITS_PER_WORD;
4677 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4678 TREE_VALUE (elt), type, align, cleared,
4679 (DECL_NONADDRESSABLE_P (field)
4680 && GET_CODE (to_rtx) == MEM)
4681 ? MEM_ALIAS_SET (to_rtx)
4682 : get_alias_set (TREE_TYPE (field)));
4685 else if (TREE_CODE (type) == ARRAY_TYPE)
4690 tree domain = TYPE_DOMAIN (type);
4691 tree elttype = TREE_TYPE (type);
4692 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4693 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4694 HOST_WIDE_INT minelt;
4695 HOST_WIDE_INT maxelt;
4697 /* If we have constant bounds for the range of the type, get them. */
4700 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4701 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4704 /* If the constructor has fewer elements than the array,
4705 clear the whole array first. Similarly if this is
4706 static constructor of a non-BLKmode object. */
4707 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4711 HOST_WIDE_INT count = 0, zero_count = 0;
4712 need_to_clear = ! const_bounds_p;
4714 /* This loop is a more accurate version of the loop in
4715 mostly_zeros_p (it handles RANGE_EXPR in an index).
4716 It is also needed to check for missing elements. */
4717 for (elt = CONSTRUCTOR_ELTS (exp);
4718 elt != NULL_TREE && ! need_to_clear;
4719 elt = TREE_CHAIN (elt))
4721 tree index = TREE_PURPOSE (elt);
4722 HOST_WIDE_INT this_node_count;
4724 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4726 tree lo_index = TREE_OPERAND (index, 0);
4727 tree hi_index = TREE_OPERAND (index, 1);
4729 if (! host_integerp (lo_index, 1)
4730 || ! host_integerp (hi_index, 1))
4736 this_node_count = (tree_low_cst (hi_index, 1)
4737 - tree_low_cst (lo_index, 1) + 1);
4740 this_node_count = 1;
4742 count += this_node_count;
4743 if (mostly_zeros_p (TREE_VALUE (elt)))
4744 zero_count += this_node_count;
4747 /* Clear the entire array first if there are any missing elements,
4748 or if the incidence of zero elements is >= 75%. */
4750 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4754 if (need_to_clear && size > 0)
4757 clear_storage (target, GEN_INT (size), align);
4761 /* Inform later passes that the old value is dead. */
4762 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4764 /* Store each element of the constructor into
4765 the corresponding element of TARGET, determined
4766 by counting the elements. */
4767 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4769 elt = TREE_CHAIN (elt), i++)
4771 register enum machine_mode mode;
4772 HOST_WIDE_INT bitsize;
4773 HOST_WIDE_INT bitpos;
4775 tree value = TREE_VALUE (elt);
4776 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4777 tree index = TREE_PURPOSE (elt);
4778 rtx xtarget = target;
4780 if (cleared && is_zeros_p (value))
4783 unsignedp = TREE_UNSIGNED (elttype);
4784 mode = TYPE_MODE (elttype);
4785 if (mode == BLKmode)
4786 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4787 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4790 bitsize = GET_MODE_BITSIZE (mode);
4792 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4794 tree lo_index = TREE_OPERAND (index, 0);
4795 tree hi_index = TREE_OPERAND (index, 1);
4796 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4797 struct nesting *loop;
4798 HOST_WIDE_INT lo, hi, count;
4801 /* If the range is constant and "small", unroll the loop. */
4803 && host_integerp (lo_index, 0)
4804 && host_integerp (hi_index, 0)
4805 && (lo = tree_low_cst (lo_index, 0),
4806 hi = tree_low_cst (hi_index, 0),
4807 count = hi - lo + 1,
4808 (GET_CODE (target) != MEM
4810 || (host_integerp (TYPE_SIZE (elttype), 1)
4811 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4814 lo -= minelt; hi -= minelt;
4815 for (; lo <= hi; lo++)
4817 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4818 store_constructor_field
4819 (target, bitsize, bitpos, mode, value, type, align,
4821 TYPE_NONALIASED_COMPONENT (type)
4822 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4827 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4828 loop_top = gen_label_rtx ();
4829 loop_end = gen_label_rtx ();
4831 unsignedp = TREE_UNSIGNED (domain);
4833 index = build_decl (VAR_DECL, NULL_TREE, domain);
4836 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4838 SET_DECL_RTL (index, index_r);
4839 if (TREE_CODE (value) == SAVE_EXPR
4840 && SAVE_EXPR_RTL (value) == 0)
4842 /* Make sure value gets expanded once before the
4844 expand_expr (value, const0_rtx, VOIDmode, 0);
4847 store_expr (lo_index, index_r, 0);
4848 loop = expand_start_loop (0);
4850 /* Assign value to element index. */
4852 = convert (ssizetype,
4853 fold (build (MINUS_EXPR, TREE_TYPE (index),
4854 index, TYPE_MIN_VALUE (domain))));
4855 position = size_binop (MULT_EXPR, position,
4857 TYPE_SIZE_UNIT (elttype)));
4859 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4860 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4861 xtarget = change_address (target, mode, addr);
4862 if (TREE_CODE (value) == CONSTRUCTOR)
4863 store_constructor (value, xtarget, align, cleared,
4864 bitsize / BITS_PER_UNIT);
4866 store_expr (value, xtarget, 0);
4868 expand_exit_loop_if_false (loop,
4869 build (LT_EXPR, integer_type_node,
4872 expand_increment (build (PREINCREMENT_EXPR,
4874 index, integer_one_node), 0, 0);
4876 emit_label (loop_end);
4879 else if ((index != 0 && ! host_integerp (index, 0))
4880 || ! host_integerp (TYPE_SIZE (elttype), 1))
4886 index = ssize_int (1);
4889 index = convert (ssizetype,
4890 fold (build (MINUS_EXPR, index,
4891 TYPE_MIN_VALUE (domain))));
4893 position = size_binop (MULT_EXPR, index,
4895 TYPE_SIZE_UNIT (elttype)));
4896 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4897 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4898 xtarget = change_address (target, mode, addr);
4899 store_expr (value, xtarget, 0);
4904 bitpos = ((tree_low_cst (index, 0) - minelt)
4905 * tree_low_cst (TYPE_SIZE (elttype), 1));
4907 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4909 store_constructor_field (target, bitsize, bitpos, mode, value,
4910 type, align, cleared,
4911 TYPE_NONALIASED_COMPONENT (type)
4912 && GET_CODE (target) == MEM
4913 ? MEM_ALIAS_SET (target) :
4914 get_alias_set (elttype));
4920 /* Set constructor assignments. */
4921 else if (TREE_CODE (type) == SET_TYPE)
4923 tree elt = CONSTRUCTOR_ELTS (exp);
4924 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4925 tree domain = TYPE_DOMAIN (type);
4926 tree domain_min, domain_max, bitlength;
4928 /* The default implementation strategy is to extract the constant
4929 parts of the constructor, use that to initialize the target,
4930 and then "or" in whatever non-constant ranges we need in addition.
4932 If a large set is all zero or all ones, it is
4933 probably better to set it using memset (if available) or bzero.
4934 Also, if a large set has just a single range, it may also be
4935 better to first clear the set (using
4936 bzero/memset), and set the bits we want. */
4938 /* Check for all zeros. */
4939 if (elt == NULL_TREE && size > 0)
4942 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4946 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4947 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4948 bitlength = size_binop (PLUS_EXPR,
4949 size_diffop (domain_max, domain_min),
4952 nbits = tree_low_cst (bitlength, 1);
4954 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4955 are "complicated" (more than one range), initialize (the
4956 constant parts) by copying from a constant. */
4957 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4958 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4960 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4961 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4962 char *bit_buffer = (char *) alloca (nbits);
4963 HOST_WIDE_INT word = 0;
4964 unsigned int bit_pos = 0;
4965 unsigned int ibit = 0;
4966 unsigned int offset = 0; /* In bytes from beginning of set. */
4968 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4971 if (bit_buffer[ibit])
4973 if (BYTES_BIG_ENDIAN)
4974 word |= (1 << (set_word_size - 1 - bit_pos));
4976 word |= 1 << bit_pos;
4980 if (bit_pos >= set_word_size || ibit == nbits)
4982 if (word != 0 || ! cleared)
4984 rtx datum = GEN_INT (word);
4987 /* The assumption here is that it is safe to use
4988 XEXP if the set is multi-word, but not if
4989 it's single-word. */
4990 if (GET_CODE (target) == MEM)
4992 to_rtx = plus_constant (XEXP (target, 0), offset);
4993 to_rtx = change_address (target, mode, to_rtx);
4995 else if (offset == 0)
4999 emit_move_insn (to_rtx, datum);
5006 offset += set_word_size / BITS_PER_UNIT;
5011 /* Don't bother clearing storage if the set is all ones. */
5012 if (TREE_CHAIN (elt) != NULL_TREE
5013 || (TREE_PURPOSE (elt) == NULL_TREE
5015 : ( ! host_integerp (TREE_VALUE (elt), 0)
5016 || ! host_integerp (TREE_PURPOSE (elt), 0)
5017 || (tree_low_cst (TREE_VALUE (elt), 0)
5018 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5019 != (HOST_WIDE_INT) nbits))))
5020 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5022 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5024 /* Start of range of element or NULL. */
5025 tree startbit = TREE_PURPOSE (elt);
5026 /* End of range of element, or element value. */
5027 tree endbit = TREE_VALUE (elt);
5028 #ifdef TARGET_MEM_FUNCTIONS
5029 HOST_WIDE_INT startb, endb;
5031 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5033 bitlength_rtx = expand_expr (bitlength,
5034 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5036 /* Handle non-range tuple element like [ expr ]. */
5037 if (startbit == NULL_TREE)
5039 startbit = save_expr (endbit);
5043 startbit = convert (sizetype, startbit);
5044 endbit = convert (sizetype, endbit);
5045 if (! integer_zerop (domain_min))
5047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5051 EXPAND_CONST_ADDRESS);
5052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5053 EXPAND_CONST_ADDRESS);
5059 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5062 emit_move_insn (targetx, target);
5065 else if (GET_CODE (target) == MEM)
5070 #ifdef TARGET_MEM_FUNCTIONS
5071 /* Optimization: If startbit and endbit are
5072 constants divisible by BITS_PER_UNIT,
5073 call memset instead. */
5074 if (TREE_CODE (startbit) == INTEGER_CST
5075 && TREE_CODE (endbit) == INTEGER_CST
5076 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5077 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5079 emit_library_call (memset_libfunc, LCT_NORMAL,
5081 plus_constant (XEXP (targetx, 0),
5082 startb / BITS_PER_UNIT),
5084 constm1_rtx, TYPE_MODE (integer_type_node),
5085 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5086 TYPE_MODE (sizetype));
5090 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5091 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5092 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5093 startbit_rtx, TYPE_MODE (sizetype),
5094 endbit_rtx, TYPE_MODE (sizetype));
5097 emit_move_insn (target, targetx);
5105 /* Store the value of EXP (an expression tree)
5106 into a subfield of TARGET which has mode MODE and occupies
5107 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5108 If MODE is VOIDmode, it means that we are storing into a bit-field.
5110 If VALUE_MODE is VOIDmode, return nothing in particular.
5111 UNSIGNEDP is not used in this case.
5113 Otherwise, return an rtx for the value stored. This rtx
5114 has mode VALUE_MODE if that is convenient to do.
5115 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5117 ALIGN is the alignment that TARGET is known to have.
5118 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5120 ALIAS_SET is the alias set for the destination. This value will
5121 (in general) be different from that for TARGET, since TARGET is a
5122 reference to the containing structure. */
5125 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5126 unsignedp, align, total_size, alias_set)
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos;
5130 enum machine_mode mode;
5132 enum machine_mode value_mode;
5135 HOST_WIDE_INT total_size;
/* Mask of the low BITSIZE bits; left at 0 when the field is a full
   host-wide word or wider, meaning "no masking possible".  */
5138 HOST_WIDE_INT width_mask = 0;
5140 if (TREE_CODE (exp) == ERROR_MARK)
5143 /* If we have nothing to store, do nothing unless the expression has
5146 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5148 if (bitsize < HOST_BITS_PER_WIDE_INT)
5149 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5151 /* If we are storing into an unaligned field of an aligned union that is
5152 in a register, we may have the mode of TARGET being an integer mode but
5153 MODE == BLKmode. In that case, get an aligned object whose size and
5154 alignment are the same as TARGET and store TARGET into it (we can avoid
5155 the store if the field being stored is the entire width of TARGET). Then
5156 call ourselves recursively to store the field into a BLKmode version of
5157 that object. Finally, load from the object into TARGET. This is not
5158 very efficient in general, but should only be slightly more expensive
5159 than the otherwise-required unaligned accesses. Perhaps this can be
5160 cleaned up later. */
5163 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5167 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5170 rtx blk_object = copy_rtx (object);
5172 PUT_MODE (blk_object, BLKmode);
/* The copy of TARGET into OBJECT is only needed when the field does not
   cover the whole register; otherwise every bit will be rewritten.  */
5174 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5175 emit_move_insn (object, target);
5177 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5178 align, total_size, alias_set);
5180 /* Even though we aren't returning target, we need to
5181 give it the updated value. */
5182 emit_move_insn (target, object);
5187 if (GET_CODE (target) == CONCAT)
5189 /* We're storing into a struct containing a single __complex. */
5193 return store_expr (exp, target, 0);
5196 /* If the structure is in a register or if the component
5197 is a bit field, we cannot use addressing to access it.
5198 Use bit-field techniques or SUBREG to store in it. */
5200 if (mode == VOIDmode
5201 || (mode != BLKmode && ! direct_store[(int) mode]
5202 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5203 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5204 || GET_CODE (target) == REG
5205 || GET_CODE (target) == SUBREG
5206 /* If the field isn't aligned enough to store as an ordinary memref,
5207 store it as a bit field. */
5208 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5209 && (align < GET_MODE_ALIGNMENT (mode)
5210 || bitpos % GET_MODE_ALIGNMENT (mode)))
5211 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5212 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5213 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5214 /* If the RHS and field are a constant size and the size of the
5215 RHS isn't the same size as the bitfield, we must use bitfield
5218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5219 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
/* Expand the RHS once; the code below may shift or mode-convert it
   before storing.  */
5221 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5223 /* If BITSIZE is narrower than the size of the type of EXP
5224 we will be narrowing TEMP. Normally, what's wanted are the
5225 low-order bits. However, if EXP's type is a record and this is
5226 big-endian machine, we want the upper BITSIZE bits. */
5227 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5228 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5229 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5230 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5231 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5235 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5237 if (mode != VOIDmode && mode != BLKmode
5238 && mode != TYPE_MODE (TREE_TYPE (exp)))
5239 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5241 /* If the modes of TARGET and TEMP are both BLKmode, both
5242 must be in memory and BITPOS must be aligned on a byte
5243 boundary. If so, we simply do a block copy. */
5244 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5246 unsigned int exp_align = expr_align (exp);
5248 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5249 || bitpos % BITS_PER_UNIT != 0)
5252 target = change_address (target, VOIDmode,
5253 plus_constant (XEXP (target, 0),
5254 bitpos / BITS_PER_UNIT));
5256 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5257 align = MIN (exp_align, align);
5259 /* Find an alignment that is consistent with the bit position. */
5260 while ((bitpos % align) != 0)
/* BITSIZE of -1 means "variable size"; fall back to the tree-level
   size of EXP, otherwise round the bit count up to whole bytes.  */
5263 emit_block_move (target, temp,
5264 bitsize == -1 ? expr_size (exp)
5265 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5269 return value_mode == VOIDmode ? const0_rtx : target;
5272 /* Store the value in the bitfield. */
5273 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5274 if (value_mode != VOIDmode)
5276 /* The caller wants an rtx for the value. */
5277 /* If possible, avoid refetching from the bitfield itself. */
5279 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5282 enum machine_mode tmode;
5285 return expand_and (temp,
5289 GET_MODE (temp) == VOIDmode
5291 : GET_MODE (temp))), NULL_RTX);
/* No usable mask: isolate the low BITSIZE bits of TEMP with a
   left-shift/right-shift pair instead.  */
5292 tmode = GET_MODE (temp);
5293 if (tmode == VOIDmode)
5295 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5296 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5297 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5299 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5300 NULL_RTX, value_mode, 0, align,
/* Ordinary addressable-memory case: build a memref covering just the
   field and store through it.  */
5307 rtx addr = XEXP (target, 0);
5310 /* If a value is wanted, it must be the lhs;
5311 so make the address stable for multiple use. */
5313 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5314 && ! CONSTANT_ADDRESS_P (addr)
5315 /* A frame-pointer reference is already stable. */
5316 && ! (GET_CODE (addr) == PLUS
5317 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5318 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5319 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5320 addr = copy_to_reg (addr);
5322 /* Now build a reference to just the desired component. */
5324 to_rtx = copy_rtx (change_address (target, mode,
5325 plus_constant (addr,
5327 / BITS_PER_UNIT))));
5328 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5329 MEM_ALIAS_SET (to_rtx) = alias_set;
5331 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5335 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5336 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5337 ARRAY_REFs and find the ultimate containing object, which we return.
5339 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5340 bit position, and *PUNSIGNEDP to the signedness of the field.
5341 If the position of the field is variable, we store a tree
5342 giving the variable offset (in units) in *POFFSET.
5343 This offset is in addition to the bit position.
5344 If the position is not variable, we store 0 in *POFFSET.
5345 We set *PALIGNMENT to the alignment of the address that will be
5346 computed. This is the alignment of the thing we return if *POFFSET
5347 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5349 If any of the extraction expressions is volatile,
5350 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5352 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5353 is a mode that can be used to access the field. In that case, *PBITSIZE
5356 If the field describes a variable-sized object, *PMODE is set to
5357 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5358 this case, but the address of the object can be found. */
5361 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5362 punsignedp, pvolatilep, palignment)
5364 HOST_WIDE_INT *pbitsize;
5365 HOST_WIDE_INT *pbitpos;
5367 enum machine_mode *pmode;
5370 unsigned int *palignment;
5373 enum machine_mode mode = VOIDmode;
/* OFFSET accumulates the variable part of the position, in bytes;
   BIT_OFFSET accumulates the constant part, in bits.  */
5374 tree offset = size_zero_node;
5375 tree bit_offset = bitsize_zero_node;
5376 unsigned int alignment = BIGGEST_ALIGNMENT;
5379 /* First get the mode, signedness, and size. We do this from just the
5380 outermost expression. */
5381 if (TREE_CODE (exp) == COMPONENT_REF)
5383 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5384 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5385 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5387 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5389 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5391 size_tree = TREE_OPERAND (exp, 1);
5392 *punsignedp = TREE_UNSIGNED (exp);
5396 mode = TYPE_MODE (TREE_TYPE (exp));
5397 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5399 if (mode == BLKmode)
5400 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5402 *pbitsize = GET_MODE_BITSIZE (mode);
/* A size that does not fit in an unsigned host integer means a
   variable-sized object: report BLKmode and a size of -1.  */
5407 if (! host_integerp (size_tree, 1))
5408 mode = BLKmode, *pbitsize = -1;
5410 *pbitsize = tree_low_cst (size_tree, 1);
5413 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5414 and find the ultimate containing object. */
5417 if (TREE_CODE (exp) == BIT_FIELD_REF)
5418 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5419 else if (TREE_CODE (exp) == COMPONENT_REF)
5421 tree field = TREE_OPERAND (exp, 1);
5422 tree this_offset = DECL_FIELD_OFFSET (field);
5424 /* If this field hasn't been filled in yet, don't go
5425 past it. This should only happen when folding expressions
5426 made during type construction. */
5427 if (this_offset == 0)
5429 else if (! TREE_CONSTANT (this_offset)
5430 && contains_placeholder_p (this_offset))
5431 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5433 offset = size_binop (PLUS_EXPR, offset, this_offset);
5434 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5435 DECL_FIELD_BIT_OFFSET (field));
5437 if (! host_integerp (offset, 0))
5438 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5441 else if (TREE_CODE (exp) == ARRAY_REF)
5443 tree index = TREE_OPERAND (exp, 1);
5444 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5445 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5446 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5448 /* We assume all arrays have sizes that are a multiple of a byte.
5449 First subtract the lower bound, if any, in the type of the
5450 index, then convert to sizetype and multiply by the size of the
5452 if (low_bound != 0 && ! integer_zerop (low_bound))
5453 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5456 /* If the index has a self-referential type, pass it to a
5457 WITH_RECORD_EXPR; if the component size is self-referential, pass
5458 our component to one. */
5459 if (! TREE_CONSTANT (index)
5460 && contains_placeholder_p (index))
5461 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5462 if (! TREE_CONSTANT (unit_size)
5463 && contains_placeholder_p (unit_size))
5464 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5465 TREE_OPERAND (exp, 0));
5467 offset = size_binop (PLUS_EXPR, offset,
5468 size_binop (MULT_EXPR,
5469 convert (sizetype, index),
/* Stop descending at anything that is not a reference or a
   mode-preserving conversion.  */
5473 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5474 && ! ((TREE_CODE (exp) == NOP_EXPR
5475 || TREE_CODE (exp) == CONVERT_EXPR)
5476 && (TYPE_MODE (TREE_TYPE (exp))
5477 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5480 /* If any reference in the chain is volatile, the effect is volatile. */
5481 if (TREE_THIS_VOLATILE (exp))
5484 /* If the offset is non-constant already, then we can't assume any
5485 alignment more than the alignment here. */
5486 if (! TREE_CONSTANT (offset))
5487 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* Descend to the next inner reference.  */
5489 exp = TREE_OPERAND (exp, 0);
5493 alignment = MIN (alignment, DECL_ALIGN (exp));
5494 else if (TREE_TYPE (exp) != 0)
5495 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5497 /* If OFFSET is constant, see if we can return the whole thing as a
5498 constant bit position. Otherwise, split it up. */
5499 if (host_integerp (offset, 0)
5500 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5502 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5503 && host_integerp (tem, 0))
5504 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5506 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5509 *palignment = alignment;
5513 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5515 static enum memory_use_mode
5516 get_memory_usage_from_modifier (modifier)
5517 enum expand_modifier modifier;
/* Map an expansion modifier onto the memory-usage mode it implies.
   NOTE(review): the case label(s) reaching this read-only return are
   not visible in this view -- confirm against the full switch.  */
5523 return MEMORY_USE_RO;
5525 case EXPAND_MEMORY_USE_WO:
5526 return MEMORY_USE_WO;
5528 case EXPAND_MEMORY_USE_RW:
5529 return MEMORY_USE_RW;
5531 case EXPAND_MEMORY_USE_DONT:
5532 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5533 MEMORY_USE_DONT, because they are modifiers to a call of
5534 expand_expr in the ADDR_EXPR case of expand_expr. */
5535 case EXPAND_CONST_ADDRESS:
5536 case EXPAND_INITIALIZER:
5537 return MEMORY_USE_DONT;
5538 case EXPAND_MEMORY_USE_BAD:
5544 /* Given an rtx VALUE that may contain additions and multiplications, return
5545 an equivalent value that just refers to a register, memory, or constant.
5546 This is done by generating instructions to perform the arithmetic and
5547 returning a pseudo-register containing the value.
5549 The returned value may be a REG, SUBREG, MEM or constant. */
5552 force_operand (value, target)
5555 register optab binoptab = 0;
5556 /* Use a temporary to force order of execution of calls to
5560 /* Use subtarget as the target for operand 0 of a binary operation. */
5561 register rtx subtarget = get_subtarget (target);
5563 /* Check for a PIC address load. */
5565 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5566 && XEXP (value, 0) == pic_offset_table_rtx
5567 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5568 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5569 || GET_CODE (XEXP (value, 1)) == CONST))
/* A PIC address must be loaded by a single move, not re-expanded as
   arithmetic; emit that move into a fresh pseudo.  */
5572 subtarget = gen_reg_rtx (GET_MODE (value));
5573 emit_move_insn (subtarget, value);
/* Choose the optab matching the outermost rtx code.  */
5577 if (GET_CODE (value) == PLUS)
5578 binoptab = add_optab;
5579 else if (GET_CODE (value) == MINUS)
5580 binoptab = sub_optab;
5581 else if (GET_CODE (value) == MULT)
5583 op2 = XEXP (value, 1);
5584 if (!CONSTANT_P (op2)
5585 && !(GET_CODE (op2) == REG && op2 != subtarget))
5587 tmp = force_operand (XEXP (value, 0), subtarget);
5588 return expand_mult (GET_MODE (value), tmp,
5589 force_operand (op2, NULL_RTX),
5595 op2 = XEXP (value, 1);
5596 if (!CONSTANT_P (op2)
5597 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Subtracting a CONST_INT is better expressed as adding its
   negation; that lets the add path below apply.  */
5599 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5601 binoptab = add_optab;
5602 op2 = negate_rtx (GET_MODE (value), op2);
5605 /* Check for an addition with OP2 a constant integer and our first
5606 operand a PLUS of a virtual register and something else. In that
5607 case, we want to emit the sum of the virtual register and the
5608 constant first and then add the other value. This allows virtual
5609 register instantiation to simply modify the constant rather than
5610 creating another one around this addition. */
5611 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5612 && GET_CODE (XEXP (value, 0)) == PLUS
5613 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5614 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5615 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5617 rtx temp = expand_binop (GET_MODE (value), binoptab,
5618 XEXP (XEXP (value, 0), 0), op2,
5619 subtarget, 0, OPTAB_LIB_WIDEN);
5620 return expand_binop (GET_MODE (value), binoptab, temp,
5621 force_operand (XEXP (XEXP (value, 0), 1), 0),
5622 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force each operand into operand form, then
   expand the operation itself.  */
5625 tmp = force_operand (XEXP (value, 0), subtarget);
5626 return expand_binop (GET_MODE (value), binoptab, tmp,
5627 force_operand (op2, NULL_RTX),
5628 target, 0, OPTAB_LIB_WIDEN);
5629 /* We give UNSIGNEDP = 0 to expand_binop
5630 because the only operations we are expanding here are signed ones. */
5635 /* Subroutine of expand_expr:
5636 save the non-copied parts (LIST) of an expr (LHS), and return a list
5637 which can restore these values to their previous values,
5638 should something modify their storage. */
5641 save_noncopied_parts (lhs, list)
/* Walk LIST; nested TREE_LISTs are handled by recursion, each leaf
   names a part of LHS to be saved into a fresh constant temporary.  */
5648 for (tail = list; tail; tail = TREE_CHAIN (tail))
5649 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5650 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5653 tree part = TREE_VALUE (tail);
5654 tree part_type = TREE_TYPE (part);
5655 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5657 = assign_temp (build_qualified_type (part_type,
5658 (TYPE_QUALS (part_type)
5659 | TYPE_QUAL_CONST)),
/* If the temporary's address is not directly usable in this mode,
   rebuild the memref with a legitimized address.  */
5662 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5663 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
/* Record the pair (part reference, RTL_EXPR holding the temporary).  */
5664 parts = tree_cons (to_be_saved,
5665 build (RTL_EXPR, part_type, NULL_TREE,
/* Copy the part's current value into the temporary just built.  */
5668 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5673 /* Subroutine of expand_expr:
5674 record the non-copied parts (LIST) of an expr (LHS), and return a list
5675 which specifies the initial values of these parts. */
5678 init_noncopied_parts (lhs, list)
/* Walk LIST as in save_noncopied_parts, but only record each part's
   initial value (TREE_PURPOSE) paired with a reference to the part --
   no temporaries are created and nothing is stored here.  */
5685 for (tail = list; tail; tail = TREE_CHAIN (tail))
5686 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5687 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5688 else if (TREE_PURPOSE (tail))
5690 tree part = TREE_VALUE (tail);
5691 tree part_type = TREE_TYPE (part);
5692 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5693 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5698 /* Subroutine of expand_expr: return nonzero iff there is no way that
5699 EXP can reference X, which is being modified. TOP_P is nonzero if this
5700 call is going to be used to determine whether we need a temporary
5701 for EXP, as opposed to a recursive call to this function.
5703 It is always safe for this routine to return zero since it merely
5704 searches for optimization opportunities. */
5707 safe_from_p (x, exp, top_p)
/* List of SAVE_EXPRs marked TREE_PRIVATE during the current top-level
   scan, so the marks can be cleared afterwards.  */
5714 static tree save_expr_list;
5717 /* If EXP has varying size, we MUST use a target since we currently
5718 have no way of allocating temporaries of variable size
5719 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5720 So we assume here that something at a higher level has prevented a
5721 clash. This is somewhat bogus, but the best we can do. Only
5722 do this when X is BLKmode and when we are at the top level. */
5723 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5724 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5725 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5726 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5727 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5729 && GET_MODE (x) == BLKmode)
5730 /* If X is in the outgoing argument area, it is always safe. */
5731 || (GET_CODE (x) == MEM
5732 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5733 || (GET_CODE (XEXP (x, 0)) == PLUS
5734 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5737 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5738 find the underlying pseudo. */
5739 if (GET_CODE (x) == SUBREG)
5742 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5746 /* A SAVE_EXPR might appear many times in the expression passed to the
5747 top-level safe_from_p call, and if it has a complex subexpression,
5748 examining it multiple times could result in a combinatorial explosion.
5749 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5750 with optimization took about 28 minutes to compile -- even though it was
5751 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5752 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5753 we have processed. Note that the only test of top_p was above. */
/* Top-level entry: do the real scan with TOP_P == 0, then clear the
   TREE_PRIVATE marks recorded in save_expr_list.  */
5762 rtn = safe_from_p (x, exp, 0);
5764 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5765 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5770 /* Now look at our tree code and possibly recurse. */
5771 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
/* Declarations conflict only through their rtl, if any.  */
5774 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5781 if (TREE_CODE (exp) == TREE_LIST)
5782 return ((TREE_VALUE (exp) == 0
5783 || safe_from_p (x, TREE_VALUE (exp), 0))
5784 && (TREE_CHAIN (exp) == 0
5785 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5786 else if (TREE_CODE (exp) == ERROR_MARK)
5787 return 1; /* An already-visited SAVE_EXPR? */
5792 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5796 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5797 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5801 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5802 the expression. If it is set, we conflict iff we are that rtx or
5803 both are in memory. Otherwise, we check all operands of the
5804 expression recursively. */
5806 switch (TREE_CODE (exp))
5809 return (staticp (TREE_OPERAND (exp, 0))
5810 || TREE_STATIC (exp)
5811 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
/* A memory reference conflicts with X when their alias sets can
   overlap.  */
5814 if (GET_CODE (x) == MEM
5815 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5816 get_alias_set (exp)))
5821 /* Assume that the call will clobber all hard registers and
5823 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5824 || GET_CODE (x) == MEM)
5829 /* If a sequence exists, we would have to scan every instruction
5830 in the sequence to see if it was safe. This is probably not
5832 if (RTL_EXPR_SEQUENCE (exp))
5835 exp_rtl = RTL_EXPR_RTL (exp);
5838 case WITH_CLEANUP_EXPR:
5839 exp_rtl = RTL_EXPR_RTL (exp);
5842 case CLEANUP_POINT_EXPR:
5843 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5846 exp_rtl = SAVE_EXPR_RTL (exp);
5850 /* If we've already scanned this, don't do it again. Otherwise,
5851 show we've scanned it and record for clearing the flag if we're
5853 if (TREE_PRIVATE (exp))
5856 TREE_PRIVATE (exp) = 1;
5857 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
/* Unmark before returning "unsafe" so later scans re-examine it.  */
5859 TREE_PRIVATE (exp) = 0;
5863 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5867 /* The only operand we look at is operand 1. The rest aren't
5868 part of the expression. */
5869 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5871 case METHOD_CALL_EXPR:
5872 /* This takes a rtx argument, but shouldn't appear here. */
5879 /* If we have an rtx, we do not need to scan our operands. */
5883 nops = first_rtl_op (TREE_CODE (exp));
5884 for (i = 0; i < nops; i++)
5885 if (TREE_OPERAND (exp, i) != 0
5886 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5889 /* If this is a language-specific tree code, it may require
5890 special handling. */
5891 if ((unsigned int) TREE_CODE (exp)
5892 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5894 && !(*lang_safe_from_p) (x, exp))
5898 /* If we have an rtl, find any enclosed object. Then see if we conflict
/* Strip a SUBREG wrapper; a hard-register SUBREG is handled here too.  */
5902 if (GET_CODE (exp_rtl) == SUBREG)
5904 exp_rtl = SUBREG_REG (exp_rtl);
5905 if (GET_CODE (exp_rtl) == REG
5906 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5910 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5911 are memory and they conflict. */
5912 return ! (rtx_equal_p (x, exp_rtl)
5913 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5914 && true_dependence (exp_rtl, GET_MODE (x), x,
5915 rtx_addr_varies_p)));
5918 /* If we reach here, it is safe. */
5922 /* Subroutine of expand_expr: return nonzero iff EXP is an
5923 expression whose type is statically determinable. */
/* NOTE(review): the function header (name and signature) falls in an
   elided region of this excerpt; only the leading comment and the
   TREE_CODE membership test are visible here.  Confirm against the
   full source before relying on this fragment.  */
5929 if (TREE_CODE (exp) == PARM_DECL
5930 || TREE_CODE (exp) == VAR_DECL
5931 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5932 || TREE_CODE (exp) == COMPONENT_REF
5933 || TREE_CODE (exp) == ARRAY_REF)
/* NOTE(review): the branch body and the alternate return path are
   elided; presumably these decl/reference/call forms are the ones
   with a statically determinable type -- TODO confirm.  */
5938 /* Subroutine of expand_expr: return rtx if EXP is a
5939 variable or parameter; else return 0. */
/* NOTE(review): the signature, the remaining switch arms, and the
   default/fall-through return are elided in this excerpt; only the
   case that hands back the decl's RTL is visible.  */
5946 switch (TREE_CODE (exp))
5950 return DECL_RTL (exp);
5956 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Diagnose a tree expression whose integer arithmetic would require a
   machine mode wider than the target's MAX_INTEGER_COMPUTATION_MODE.
   Reports "unsupported wide integer operation" via internal_error
   instead of letting expansion emit insns the target cannot handle.
   NOTE(review): the K&R parameter declaration, the NOP-stripping
   statement, the early return for constant conversions, and the
   closing brace/#endif all fall in elided regions of this excerpt --
   confirm against the full source.  */
5959 check_max_integer_computation_mode (exp)
5962 enum tree_code code;
5963 enum machine_mode mode;
5965 /* Strip any NOPs that don't change the mode. */
5967 code = TREE_CODE (exp);
5969 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5970 if (code == NOP_EXPR
5971 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
/* (Elided statement here: a NOP_EXPR wrapping an INTEGER_CST is a
   constant conversion and is exempt from the width check.)  */
5974 /* First check the type of the overall operation. We need only look at
5975 unary, binary and relational operations. */
5976 if (TREE_CODE_CLASS (code) == '1'
5977 || TREE_CODE_CLASS (code) == '2'
5978 || TREE_CODE_CLASS (code) == '<')
5980 mode = TYPE_MODE (TREE_TYPE (exp));
/* An integer result mode wider than the supported maximum is fatal.  */
5981 if (GET_MODE_CLASS (mode) == MODE_INT
5982 && mode > MAX_INTEGER_COMPUTATION_MODE)
5983 internal_error ("unsupported wide integer operation");
5986 /* Check operand of a unary op. */
5987 if (TREE_CODE_CLASS (code) == '1')
5989 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5990 if (GET_MODE_CLASS (mode) == MODE_INT
5991 && mode > MAX_INTEGER_COMPUTATION_MODE)
5992 internal_error ("unsupported wide integer operation");
5995 /* Check operands of a binary/comparison op. */
5996 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5998 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5999 if (GET_MODE_CLASS (mode) == MODE_INT
6000 && mode > MAX_INTEGER_COMPUTATION_MODE)
6001 internal_error ("unsupported wide integer operation");
/* The second operand of a binary/comparison op gets the same check.  */
6003 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6004 if (GET_MODE_CLASS (mode) == MODE_INT
6005 && mode > MAX_INTEGER_COMPUTATION_MODE)
6006 internal_error ("unsupported wide integer operation");
6011 /* expand_expr: generate code for computing expression EXP.
6012 An rtx for the computed value is returned. The value is never null.
6013 In the case of a void EXP, const0_rtx is returned.
6015 The value may be stored in TARGET if TARGET is nonzero.
6016 TARGET is just a suggestion; callers must assume that
6017 the rtx returned may not be the same as TARGET.
6019 If TARGET is CONST0_RTX, it means that the value will be ignored.
6021 If TMODE is not VOIDmode, it suggests generating the
6022 result in mode TMODE. But this is done only when convenient.
6023 Otherwise, TMODE is ignored and the value generated in its natural mode.
6024 TMODE is just a suggestion; callers must assume that
6025 the rtx returned may not have mode TMODE.
6027 Note that TARGET may have neither TMODE nor MODE. In that case, it
6028 probably will not be used.
6030 If MODIFIER is EXPAND_SUM then when EXP is an addition
6031 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6032 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6033 products as above, or REG or MEM, or constant.
6034 Ordinarily in such cases we would output mul or add instructions
6035 and then return a pseudo reg containing the sum.
6037 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6038 it also marks a label as absolutely required (it can't be dead).
6039 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6040 This is used for outputting expressions used in initializers.
6042 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6043 with a constant address even if that address is not normally legitimate.
6044 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6047 expand_expr (exp, target, tmode, modifier)
6050 enum machine_mode tmode;
6051 enum expand_modifier modifier;
6053 register rtx op0, op1, temp;
6054 tree type = TREE_TYPE (exp);
6055 int unsignedp = TREE_UNSIGNED (type);
6056 register enum machine_mode mode;
6057 register enum tree_code code = TREE_CODE (exp);
6059 rtx subtarget, original_target;
6062 /* Used by check-memory-usage to make modifier read only. */
6063 enum expand_modifier ro_modifier;
6065 /* Handle ERROR_MARK before anybody tries to access its type. */
6066 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6068 op0 = CONST0_RTX (tmode);
6074 mode = TYPE_MODE (type);
6075 /* Use subtarget as the target for operand 0 of a binary operation. */
6076 subtarget = get_subtarget (target);
6077 original_target = target;
6078 ignore = (target == const0_rtx
6079 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6080 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6081 || code == COND_EXPR)
6082 && TREE_CODE (type) == VOID_TYPE));
6084 /* Make a read-only version of the modifier. */
6085 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6086 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6087 ro_modifier = modifier;
6089 ro_modifier = EXPAND_NORMAL;
6091 /* If we are going to ignore this result, we need only do something
6092 if there is a side-effect somewhere in the expression. If there
6093 is, short-circuit the most common cases here. Note that we must
6094 not call expand_expr with anything but const0_rtx in case this
6095 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6099 if (! TREE_SIDE_EFFECTS (exp))
6102 /* Ensure we reference a volatile object even if value is ignored, but
6103 don't do this if all we are doing is taking its address. */
6104 if (TREE_THIS_VOLATILE (exp)
6105 && TREE_CODE (exp) != FUNCTION_DECL
6106 && mode != VOIDmode && mode != BLKmode
6107 && modifier != EXPAND_CONST_ADDRESS)
6109 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6110 if (GET_CODE (temp) == MEM)
6111 temp = copy_to_reg (temp);
6115 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6116 || code == INDIRECT_REF || code == BUFFER_REF)
6117 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6118 VOIDmode, ro_modifier);
6119 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6120 || code == ARRAY_REF)
6122 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6123 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6126 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6127 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6128 /* If the second operand has no side effects, just evaluate
6130 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6131 VOIDmode, ro_modifier);
6132 else if (code == BIT_FIELD_REF)
6134 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6135 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6136 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6143 #ifdef MAX_INTEGER_COMPUTATION_MODE
6144 /* Only check stuff here if the mode we want is different from the mode
6145 of the expression; if it's the same, check_max_integer_computiation_mode
6146 will handle it. Do we really need to check this stuff at all? */
6149 && GET_MODE (target) != mode
6150 && TREE_CODE (exp) != INTEGER_CST
6151 && TREE_CODE (exp) != PARM_DECL
6152 && TREE_CODE (exp) != ARRAY_REF
6153 && TREE_CODE (exp) != COMPONENT_REF
6154 && TREE_CODE (exp) != BIT_FIELD_REF
6155 && TREE_CODE (exp) != INDIRECT_REF
6156 && TREE_CODE (exp) != CALL_EXPR
6157 && TREE_CODE (exp) != VAR_DECL
6158 && TREE_CODE (exp) != RTL_EXPR)
6160 enum machine_mode mode = GET_MODE (target);
6162 if (GET_MODE_CLASS (mode) == MODE_INT
6163 && mode > MAX_INTEGER_COMPUTATION_MODE)
6164 internal_error ("unsupported wide integer operation");
6168 && TREE_CODE (exp) != INTEGER_CST
6169 && TREE_CODE (exp) != PARM_DECL
6170 && TREE_CODE (exp) != ARRAY_REF
6171 && TREE_CODE (exp) != COMPONENT_REF
6172 && TREE_CODE (exp) != BIT_FIELD_REF
6173 && TREE_CODE (exp) != INDIRECT_REF
6174 && TREE_CODE (exp) != VAR_DECL
6175 && TREE_CODE (exp) != CALL_EXPR
6176 && TREE_CODE (exp) != RTL_EXPR
6177 && GET_MODE_CLASS (tmode) == MODE_INT
6178 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6179 internal_error ("unsupported wide integer operation");
6181 check_max_integer_computation_mode (exp);
6184 /* If will do cse, generate all results into pseudo registers
6185 since 1) that allows cse to find more things
6186 and 2) otherwise cse could produce an insn the machine
6189 if (! cse_not_expected && mode != BLKmode && target
6190 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6197 tree function = decl_function_context (exp);
6198 /* Handle using a label in a containing function. */
6199 if (function != current_function_decl
6200 && function != inline_function_decl && function != 0)
6202 struct function *p = find_function_data (function);
6203 p->expr->x_forced_labels
6204 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6205 p->expr->x_forced_labels);
6209 if (modifier == EXPAND_INITIALIZER)
6210 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6215 temp = gen_rtx_MEM (FUNCTION_MODE,
6216 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6217 if (function != current_function_decl
6218 && function != inline_function_decl && function != 0)
6219 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6224 if (DECL_RTL (exp) == 0)
6226 error_with_decl (exp, "prior parameter's size depends on `%s'");
6227 return CONST0_RTX (mode);
6230 /* ... fall through ... */
6233 /* If a static var's type was incomplete when the decl was written,
6234 but the type is complete now, lay out the decl now. */
6235 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6236 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6238 layout_decl (exp, 0);
6239 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6242 /* Although static-storage variables start off initialized, according to
6243 ANSI C, a memcpy could overwrite them with uninitialized values. So
6244 we check them too. This also lets us check for read-only variables
6245 accessed via a non-const declaration, in case it won't be detected
6246 any other way (e.g., in an embedded system or OS kernel without
6249 Aggregates are not checked here; they're handled elsewhere. */
6250 if (cfun && current_function_check_memory_usage
6252 && GET_CODE (DECL_RTL (exp)) == MEM
6253 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6255 enum memory_use_mode memory_usage;
6256 memory_usage = get_memory_usage_from_modifier (modifier);
6258 in_check_memory_usage = 1;
6259 if (memory_usage != MEMORY_USE_DONT)
6260 emit_library_call (chkr_check_addr_libfunc,
6261 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6262 XEXP (DECL_RTL (exp), 0), Pmode,
6263 GEN_INT (int_size_in_bytes (type)),
6264 TYPE_MODE (sizetype),
6265 GEN_INT (memory_usage),
6266 TYPE_MODE (integer_type_node));
6267 in_check_memory_usage = 0;
6270 /* ... fall through ... */
6274 if (DECL_RTL (exp) == 0)
6277 /* Ensure variable marked as used even if it doesn't go through
6278 a parser. If it hasn't be used yet, write out an external
6280 if (! TREE_USED (exp))
6282 assemble_external (exp);
6283 TREE_USED (exp) = 1;
6286 /* Show we haven't gotten RTL for this yet. */
6289 /* Handle variables inherited from containing functions. */
6290 context = decl_function_context (exp);
6292 /* We treat inline_function_decl as an alias for the current function
6293 because that is the inline function whose vars, types, etc.
6294 are being merged into the current function.
6295 See expand_inline_function. */
6297 if (context != 0 && context != current_function_decl
6298 && context != inline_function_decl
6299 /* If var is static, we don't need a static chain to access it. */
6300 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6301 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6305 /* Mark as non-local and addressable. */
6306 DECL_NONLOCAL (exp) = 1;
6307 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6309 mark_addressable (exp);
6310 if (GET_CODE (DECL_RTL (exp)) != MEM)
6312 addr = XEXP (DECL_RTL (exp), 0);
6313 if (GET_CODE (addr) == MEM)
6314 addr = change_address (addr, Pmode,
6315 fix_lexical_addr (XEXP (addr, 0), exp));
6317 addr = fix_lexical_addr (addr, exp);
6319 temp = change_address (DECL_RTL (exp), mode, addr);
6322 /* This is the case of an array whose size is to be determined
6323 from its initializer, while the initializer is still being parsed.
6326 else if (GET_CODE (DECL_RTL (exp)) == MEM
6327 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6328 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6329 XEXP (DECL_RTL (exp), 0));
6331 /* If DECL_RTL is memory, we are in the normal case and either
6332 the address is not valid or it is not a register and -fforce-addr
6333 is specified, get the address into a register. */
6335 else if (GET_CODE (DECL_RTL (exp)) == MEM
6336 && modifier != EXPAND_CONST_ADDRESS
6337 && modifier != EXPAND_SUM
6338 && modifier != EXPAND_INITIALIZER
6339 && (! memory_address_p (DECL_MODE (exp),
6340 XEXP (DECL_RTL (exp), 0))
6342 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6343 temp = change_address (DECL_RTL (exp), VOIDmode,
6344 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6346 /* If we got something, return it. But first, set the alignment
6347 the address is a register. */
6350 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6351 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6356 /* If the mode of DECL_RTL does not match that of the decl, it
6357 must be a promoted value. We return a SUBREG of the wanted mode,
6358 but mark it so that we know that it was already extended. */
6360 if (GET_CODE (DECL_RTL (exp)) == REG
6361 && GET_MODE (DECL_RTL (exp)) != mode)
6363 /* Get the signedness used for this variable. Ensure we get the
6364 same mode we got when the variable was declared. */
6365 if (GET_MODE (DECL_RTL (exp))
6366 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6369 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6370 SUBREG_PROMOTED_VAR_P (temp) = 1;
6371 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6375 return DECL_RTL (exp);
6378 return immed_double_const (TREE_INT_CST_LOW (exp),
6379 TREE_INT_CST_HIGH (exp), mode);
6382 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6383 EXPAND_MEMORY_USE_BAD);
6386 /* If optimized, generate immediate CONST_DOUBLE
6387 which will be turned into memory by reload if necessary.
6389 We used to force a register so that loop.c could see it. But
6390 this does not allow gen_* patterns to perform optimizations with
6391 the constants. It also produces two insns in cases like "x = 1.0;".
6392 On most machines, floating-point constants are not permitted in
6393 many insns, so we'd end up copying it to a register in any case.
6395 Now, we do the copying in expand_binop, if appropriate. */
6396 return immed_real_const (exp);
6400 if (! TREE_CST_RTL (exp))
6401 output_constant_def (exp, 1);
6403 /* TREE_CST_RTL probably contains a constant address.
6404 On RISC machines where a constant address isn't valid,
6405 make some insns to get that address into a register. */
6406 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6407 && modifier != EXPAND_CONST_ADDRESS
6408 && modifier != EXPAND_INITIALIZER
6409 && modifier != EXPAND_SUM
6410 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6412 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6413 return change_address (TREE_CST_RTL (exp), VOIDmode,
6414 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6415 return TREE_CST_RTL (exp);
6417 case EXPR_WITH_FILE_LOCATION:
6420 const char *saved_input_filename = input_filename;
6421 int saved_lineno = lineno;
6422 input_filename = EXPR_WFL_FILENAME (exp);
6423 lineno = EXPR_WFL_LINENO (exp);
6424 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6425 emit_line_note (input_filename, lineno);
6426 /* Possibly avoid switching back and force here. */
6427 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6428 input_filename = saved_input_filename;
6429 lineno = saved_lineno;
6434 context = decl_function_context (exp);
6436 /* If this SAVE_EXPR was at global context, assume we are an
6437 initialization function and move it into our context. */
6439 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6441 /* We treat inline_function_decl as an alias for the current function
6442 because that is the inline function whose vars, types, etc.
6443 are being merged into the current function.
6444 See expand_inline_function. */
6445 if (context == current_function_decl || context == inline_function_decl)
6448 /* If this is non-local, handle it. */
6451 /* The following call just exists to abort if the context is
6452 not of a containing function. */
6453 find_function_data (context);
6455 temp = SAVE_EXPR_RTL (exp);
6456 if (temp && GET_CODE (temp) == REG)
6458 put_var_into_stack (exp);
6459 temp = SAVE_EXPR_RTL (exp);
6461 if (temp == 0 || GET_CODE (temp) != MEM)
6463 return change_address (temp, mode,
6464 fix_lexical_addr (XEXP (temp, 0), exp));
6466 if (SAVE_EXPR_RTL (exp) == 0)
6468 if (mode == VOIDmode)
6471 temp = assign_temp (build_qualified_type (type,
6473 | TYPE_QUAL_CONST)),
6476 SAVE_EXPR_RTL (exp) = temp;
6477 if (!optimize && GET_CODE (temp) == REG)
6478 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6481 /* If the mode of TEMP does not match that of the expression, it
6482 must be a promoted value. We pass store_expr a SUBREG of the
6483 wanted mode but mark it so that we know that it was already
6484 extended. Note that `unsignedp' was modified above in
6487 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6489 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6490 SUBREG_PROMOTED_VAR_P (temp) = 1;
6491 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6494 if (temp == const0_rtx)
6495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6496 EXPAND_MEMORY_USE_BAD);
6498 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6500 TREE_USED (exp) = 1;
6503 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6504 must be a promoted value. We return a SUBREG of the wanted mode,
6505 but mark it so that we know that it was already extended. */
6507 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6508 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6510 /* Compute the signedness and make the proper SUBREG. */
6511 promote_mode (type, mode, &unsignedp, 0);
6512 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6513 SUBREG_PROMOTED_VAR_P (temp) = 1;
6514 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6518 return SAVE_EXPR_RTL (exp);
6523 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6524 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6528 case PLACEHOLDER_EXPR:
6530 tree placeholder_expr;
6532 /* If there is an object on the head of the placeholder list,
6533 see if some object in it of type TYPE or a pointer to it. For
6534 further information, see tree.def. */
6535 for (placeholder_expr = placeholder_list;
6536 placeholder_expr != 0;
6537 placeholder_expr = TREE_CHAIN (placeholder_expr))
6539 tree need_type = TYPE_MAIN_VARIANT (type);
6541 tree old_list = placeholder_list;
6544 /* Find the outermost reference that is of the type we want.
6545 If none, see if any object has a type that is a pointer to
6546 the type we want. */
6547 for (elt = TREE_PURPOSE (placeholder_expr);
6548 elt != 0 && object == 0;
6550 = ((TREE_CODE (elt) == COMPOUND_EXPR
6551 || TREE_CODE (elt) == COND_EXPR)
6552 ? TREE_OPERAND (elt, 1)
6553 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6554 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6555 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6556 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6557 ? TREE_OPERAND (elt, 0) : 0))
6558 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6561 for (elt = TREE_PURPOSE (placeholder_expr);
6562 elt != 0 && object == 0;
6564 = ((TREE_CODE (elt) == COMPOUND_EXPR
6565 || TREE_CODE (elt) == COND_EXPR)
6566 ? TREE_OPERAND (elt, 1)
6567 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6568 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6569 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6570 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6571 ? TREE_OPERAND (elt, 0) : 0))
6572 if (POINTER_TYPE_P (TREE_TYPE (elt))
6573 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6575 object = build1 (INDIRECT_REF, need_type, elt);
6579 /* Expand this object skipping the list entries before
6580 it was found in case it is also a PLACEHOLDER_EXPR.
6581 In that case, we want to translate it using subsequent
6583 placeholder_list = TREE_CHAIN (placeholder_expr);
6584 temp = expand_expr (object, original_target, tmode,
6586 placeholder_list = old_list;
6592 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6595 case WITH_RECORD_EXPR:
6596 /* Put the object on the placeholder list, expand our first operand,
6597 and pop the list. */
6598 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6600 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6601 tmode, ro_modifier);
6602 placeholder_list = TREE_CHAIN (placeholder_list);
6606 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6607 expand_goto (TREE_OPERAND (exp, 0));
6609 expand_computed_goto (TREE_OPERAND (exp, 0));
6613 expand_exit_loop_if_false (NULL,
6614 invert_truthvalue (TREE_OPERAND (exp, 0)));
6617 case LABELED_BLOCK_EXPR:
6618 if (LABELED_BLOCK_BODY (exp))
6619 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6620 /* Should perhaps use expand_label, but this is simpler and safer. */
6621 do_pending_stack_adjust ();
6622 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6625 case EXIT_BLOCK_EXPR:
6626 if (EXIT_BLOCK_RETURN (exp))
6627 sorry ("returned value in block_exit_expr");
6628 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6633 expand_start_loop (1);
6634 expand_expr_stmt (TREE_OPERAND (exp, 0));
6642 tree vars = TREE_OPERAND (exp, 0);
6643 int vars_need_expansion = 0;
6645 /* Need to open a binding contour here because
6646 if there are any cleanups they must be contained here. */
6647 expand_start_bindings (2);
6649 /* Mark the corresponding BLOCK for output in its proper place. */
6650 if (TREE_OPERAND (exp, 2) != 0
6651 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6652 insert_block (TREE_OPERAND (exp, 2));
6654 /* If VARS have not yet been expanded, expand them now. */
6657 if (!DECL_RTL_SET_P (vars))
6659 vars_need_expansion = 1;
6662 expand_decl_init (vars);
6663 vars = TREE_CHAIN (vars);
6666 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6668 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6674 if (RTL_EXPR_SEQUENCE (exp))
6676 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6678 emit_insns (RTL_EXPR_SEQUENCE (exp));
6679 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6681 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6682 free_temps_for_rtl_expr (exp);
6683 return RTL_EXPR_RTL (exp);
6686 /* If we don't need the result, just ensure we evaluate any
6691 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6692 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6693 EXPAND_MEMORY_USE_BAD);
6697 /* All elts simple constants => refer to a constant in memory. But
6698 if this is a non-BLKmode mode, let it store a field at a time
6699 since that should make a CONST_INT or CONST_DOUBLE when we
6700 fold. Likewise, if we have a target we can use, it is best to
6701 store directly into the target unless the type is large enough
6702 that memcpy will be used. If we are making an initializer and
6703 all operands are constant, put it in memory as well. */
6704 else if ((TREE_STATIC (exp)
6705 && ((mode == BLKmode
6706 && ! (target != 0 && safe_from_p (target, exp, 1)))
6707 || TREE_ADDRESSABLE (exp)
6708 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6709 && (! MOVE_BY_PIECES_P
6710 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6712 && ! mostly_zeros_p (exp))))
6713 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6715 rtx constructor = output_constant_def (exp, 1);
6717 if (modifier != EXPAND_CONST_ADDRESS
6718 && modifier != EXPAND_INITIALIZER
6719 && modifier != EXPAND_SUM
6720 && (! memory_address_p (GET_MODE (constructor),
6721 XEXP (constructor, 0))
6723 && GET_CODE (XEXP (constructor, 0)) != REG)))
6724 constructor = change_address (constructor, VOIDmode,
6725 XEXP (constructor, 0));
6730 /* Handle calls that pass values in multiple non-contiguous
6731 locations. The Irix 6 ABI has examples of this. */
6732 if (target == 0 || ! safe_from_p (target, exp, 1)
6733 || GET_CODE (target) == PARALLEL)
6735 = assign_temp (build_qualified_type (type,
6737 | (TREE_READONLY (exp)
6738 * TYPE_QUAL_CONST))),
6739 TREE_ADDRESSABLE (exp), 1, 1);
6741 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6742 int_size_in_bytes (TREE_TYPE (exp)));
6748 tree exp1 = TREE_OPERAND (exp, 0);
6750 tree string = string_constant (exp1, &index);
6752 /* Try to optimize reads from const strings. */
6754 && TREE_CODE (string) == STRING_CST
6755 && TREE_CODE (index) == INTEGER_CST
6756 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6757 && GET_MODE_CLASS (mode) == MODE_INT
6758 && GET_MODE_SIZE (mode) == 1
6759 && modifier != EXPAND_MEMORY_USE_WO)
6761 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6763 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6764 op0 = memory_address (mode, op0);
6766 if (cfun && current_function_check_memory_usage
6767 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6769 enum memory_use_mode memory_usage;
6770 memory_usage = get_memory_usage_from_modifier (modifier);
6772 if (memory_usage != MEMORY_USE_DONT)
6774 in_check_memory_usage = 1;
6775 emit_library_call (chkr_check_addr_libfunc,
6776 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6777 Pmode, GEN_INT (int_size_in_bytes (type)),
6778 TYPE_MODE (sizetype),
6779 GEN_INT (memory_usage),
6780 TYPE_MODE (integer_type_node));
6781 in_check_memory_usage = 0;
6785 temp = gen_rtx_MEM (mode, op0);
6786 set_mem_attributes (temp, exp, 0);
6788 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6789 here, because, in C and C++, the fact that a location is accessed
6790 through a pointer to const does not mean that the value there can
6791 never change. Languages where it can never change should
6792 also set TREE_STATIC. */
6793 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6795 /* If we are writing to this object and its type is a record with
6796 readonly fields, we must mark it as readonly so it will
6797 conflict with readonly references to those fields. */
6798 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6799 RTX_UNCHANGING_P (temp) = 1;
6805 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6809 tree array = TREE_OPERAND (exp, 0);
6810 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6811 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6812 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6815 /* Optimize the special-case of a zero lower bound.
6817 We convert the low_bound to sizetype to avoid some problems
6818 with constant folding. (E.g. suppose the lower bound is 1,
6819 and its mode is QI. Without the conversion, (ARRAY
6820 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6821 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6823 if (! integer_zerop (low_bound))
6824 index = size_diffop (index, convert (sizetype, low_bound));
6826 /* Fold an expression like: "foo"[2].
6827 This is not done in fold so it won't happen inside &.
6828 Don't fold if this is for wide characters since it's too
6829 difficult to do correctly and this is a very rare case. */
6831 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6832 && TREE_CODE (array) == STRING_CST
6833 && TREE_CODE (index) == INTEGER_CST
6834 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6835 && GET_MODE_CLASS (mode) == MODE_INT
6836 && GET_MODE_SIZE (mode) == 1)
6838 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6840 /* If this is a constant index into a constant array,
6841 just get the value from the array. Handle both the cases when
6842 we have an explicit constructor and when our operand is a variable
6843 that was declared const. */
6845 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6846 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6847 && TREE_CODE (index) == INTEGER_CST
6848 && 0 > compare_tree_int (index,
6849 list_length (CONSTRUCTOR_ELTS
6850 (TREE_OPERAND (exp, 0)))))
6854 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6855 i = TREE_INT_CST_LOW (index);
6856 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6860 return expand_expr (fold (TREE_VALUE (elem)), target,
6861 tmode, ro_modifier);
6864 else if (optimize >= 1
6865 && modifier != EXPAND_CONST_ADDRESS
6866 && modifier != EXPAND_INITIALIZER
6867 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6868 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6869 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6871 if (TREE_CODE (index) == INTEGER_CST)
6873 tree init = DECL_INITIAL (array);
6875 if (TREE_CODE (init) == CONSTRUCTOR)
6879 for (elem = CONSTRUCTOR_ELTS (init);
6881 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6882 elem = TREE_CHAIN (elem))
6886 return expand_expr (fold (TREE_VALUE (elem)), target,
6887 tmode, ro_modifier);
6889 else if (TREE_CODE (init) == STRING_CST
6890 && 0 > compare_tree_int (index,
6891 TREE_STRING_LENGTH (init)))
6893 tree type = TREE_TYPE (TREE_TYPE (init));
6894 enum machine_mode mode = TYPE_MODE (type);
6896 if (GET_MODE_CLASS (mode) == MODE_INT
6897 && GET_MODE_SIZE (mode) == 1)
6899 (TREE_STRING_POINTER
6900 (init)[TREE_INT_CST_LOW (index)]));
6909 /* If the operand is a CONSTRUCTOR, we can just extract the
6910 appropriate field if it is present. Don't do this if we have
6911 already written the data since we want to refer to that copy
6912 and varasm.c assumes that's what we'll do. */
6913 if (code != ARRAY_REF
6914 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6915 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6919 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6920 elt = TREE_CHAIN (elt))
6921 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6922 /* We can normally use the value of the field in the
6923 CONSTRUCTOR. However, if this is a bitfield in
6924 an integral mode that we can fit in a HOST_WIDE_INT,
6925 we must mask only the number of bits in the bitfield,
6926 since this is done implicitly by the constructor. If
6927 the bitfield does not meet either of those conditions,
6928 we can't do this optimization. */
6929 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6930 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6932 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6933 <= HOST_BITS_PER_WIDE_INT))))
6935 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6936 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6938 HOST_WIDE_INT bitsize
6939 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6941 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6943 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6944 op0 = expand_and (op0, op1, target);
6948 enum machine_mode imode
6949 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6951 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6954 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6956 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6966 enum machine_mode mode1;
6967 HOST_WIDE_INT bitsize, bitpos;
6970 unsigned int alignment;
6971 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6972 &mode1, &unsignedp, &volatilep,
6975 /* If we got back the original object, something is wrong. Perhaps
6976 we are evaluating an expression too early. In any event, don't
6977 infinitely recurse. */
6981 /* If TEM's type is a union of variable size, pass TARGET to the inner
6982 computation, since it will need a temporary and TARGET is known
6983 to be safe to use. This occurs in unchecked conversion in Ada. */
6985 op0 = expand_expr (tem,
6986 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6987 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6989 ? target : NULL_RTX),
6991 (modifier == EXPAND_INITIALIZER
6992 || modifier == EXPAND_CONST_ADDRESS)
6993 ? modifier : EXPAND_NORMAL);
6995 /* If this is a constant, put it into a register if it is a
6996 legitimate constant and OFFSET is 0 and memory if it isn't. */
6997 if (CONSTANT_P (op0))
6999 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7000 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7002 op0 = force_reg (mode, op0);
7004 op0 = validize_mem (force_const_mem (mode, op0));
7009 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7011 /* If this object is in memory, put it into a register.
7012 This case can't occur in C, but can in Ada if we have
7013 unchecked conversion of an expression from a scalar type to
7014 an array or record type. */
7015 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7016 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7018 tree nt = build_qualified_type (TREE_TYPE (tem),
7019 (TYPE_QUALS (TREE_TYPE (tem))
7020 | TYPE_QUAL_CONST));
7021 rtx memloc = assign_temp (nt, 1, 1, 1);
7023 mark_temp_addr_taken (memloc);
7024 emit_move_insn (memloc, op0);
7028 if (GET_CODE (op0) != MEM)
7031 if (GET_MODE (offset_rtx) != ptr_mode)
7033 #ifdef POINTERS_EXTEND_UNSIGNED
7034 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7036 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7040 /* A constant address in OP0 can have VOIDmode, we must not try
7041 to call force_reg for that case. Avoid that case. */
7042 if (GET_CODE (op0) == MEM
7043 && GET_MODE (op0) == BLKmode
7044 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7046 && (bitpos % bitsize) == 0
7047 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7048 && alignment == GET_MODE_ALIGNMENT (mode1))
7050 rtx temp = change_address (op0, mode1,
7051 plus_constant (XEXP (op0, 0),
7054 if (GET_CODE (XEXP (temp, 0)) == REG)
7057 op0 = change_address (op0, mode1,
7058 force_reg (GET_MODE (XEXP (temp, 0)),
7063 op0 = change_address (op0, VOIDmode,
7064 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7065 force_reg (ptr_mode,
7069 /* Don't forget about volatility even if this is a bitfield. */
7070 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7072 op0 = copy_rtx (op0);
7073 MEM_VOLATILE_P (op0) = 1;
7076 /* Check the access. */
7077 if (cfun != 0 && current_function_check_memory_usage
7078 && GET_CODE (op0) == MEM)
7080 enum memory_use_mode memory_usage;
7081 memory_usage = get_memory_usage_from_modifier (modifier);
7083 if (memory_usage != MEMORY_USE_DONT)
7088 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7089 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7091 /* Check the access right of the pointer. */
7092 in_check_memory_usage = 1;
7093 if (size > BITS_PER_UNIT)
7094 emit_library_call (chkr_check_addr_libfunc,
7095 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7096 Pmode, GEN_INT (size / BITS_PER_UNIT),
7097 TYPE_MODE (sizetype),
7098 GEN_INT (memory_usage),
7099 TYPE_MODE (integer_type_node));
7100 in_check_memory_usage = 0;
7104 /* In cases where an aligned union has an unaligned object
7105 as a field, we might be extracting a BLKmode value from
7106 an integer-mode (e.g., SImode) object. Handle this case
7107 by doing the extract into an object as wide as the field
7108 (which we know to be the width of a basic mode), then
7109 storing into memory, and changing the mode to BLKmode. */
7110 if (mode1 == VOIDmode
7111 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7112 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7113 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7114 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7115 /* If the field isn't aligned enough to fetch as a memref,
7116 fetch it as a bit field. */
7117 || (mode1 != BLKmode
7118 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7119 && ((TYPE_ALIGN (TREE_TYPE (tem))
7120 < GET_MODE_ALIGNMENT (mode))
7121 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7122 /* If the type and the field are a constant size and the
7123 size of the type isn't the same size as the bitfield,
7124 we must use bitfield operations. */
7126 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7128 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7131 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7132 && (TYPE_ALIGN (type) > alignment
7133 || bitpos % TYPE_ALIGN (type) != 0)))
7135 enum machine_mode ext_mode = mode;
7137 if (ext_mode == BLKmode
7138 && ! (target != 0 && GET_CODE (op0) == MEM
7139 && GET_CODE (target) == MEM
7140 && bitpos % BITS_PER_UNIT == 0))
7141 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7143 if (ext_mode == BLKmode)
7145 /* In this case, BITPOS must start at a byte boundary and
7146 TARGET, if specified, must be a MEM. */
7147 if (GET_CODE (op0) != MEM
7148 || (target != 0 && GET_CODE (target) != MEM)
7149 || bitpos % BITS_PER_UNIT != 0)
7152 op0 = change_address (op0, VOIDmode,
7153 plus_constant (XEXP (op0, 0),
7154 bitpos / BITS_PER_UNIT));
7156 target = assign_temp (type, 0, 1, 1);
7158 emit_block_move (target, op0,
7159 bitsize == -1 ? expr_size (exp)
7160 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7167 op0 = validize_mem (op0);
7169 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7170 mark_reg_pointer (XEXP (op0, 0), alignment);
7172 op0 = extract_bit_field (op0, bitsize, bitpos,
7173 unsignedp, target, ext_mode, ext_mode,
7175 int_size_in_bytes (TREE_TYPE (tem)));
7177 /* If the result is a record type and BITSIZE is narrower than
7178 the mode of OP0, an integral mode, and this is a big endian
7179 machine, we must put the field into the high-order bits. */
7180 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7181 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7182 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7183 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7184 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7188 if (mode == BLKmode)
7190 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7192 rtx new = assign_temp (nt, 0, 1, 1);
7194 emit_move_insn (new, op0);
7195 op0 = copy_rtx (new);
7196 PUT_MODE (op0, BLKmode);
7202 /* If the result is BLKmode, use that to access the object
7204 if (mode == BLKmode)
7207 /* Get a reference to just this component. */
7208 if (modifier == EXPAND_CONST_ADDRESS
7209 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7211 rtx new = gen_rtx_MEM (mode1,
7212 plus_constant (XEXP (op0, 0),
7213 (bitpos / BITS_PER_UNIT)));
7215 MEM_COPY_ATTRIBUTES (new, op0);
7219 op0 = change_address (op0, mode1,
7220 plus_constant (XEXP (op0, 0),
7221 (bitpos / BITS_PER_UNIT)));
7223 set_mem_attributes (op0, exp, 0);
7224 if (GET_CODE (XEXP (op0, 0)) == REG)
7225 mark_reg_pointer (XEXP (op0, 0), alignment);
7227 MEM_VOLATILE_P (op0) |= volatilep;
7228 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7229 || modifier == EXPAND_CONST_ADDRESS
7230 || modifier == EXPAND_INITIALIZER)
7232 else if (target == 0)
7233 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7235 convert_move (target, op0, unsignedp);
7239 /* Intended for a reference to a buffer of a file-object in Pascal.
7240 But it's not certain that a special tree code will really be
7241 necessary for these. INDIRECT_REF might work for them. */
7247 /* Pascal set IN expression.
7250 rlo = set_low - (set_low%bits_per_word);
7251 the_word = set [ (index - rlo)/bits_per_word ];
7252 bit_index = index % bits_per_word;
7253 bitmask = 1 << bit_index;
7254 return !!(the_word & bitmask); */
7256 tree set = TREE_OPERAND (exp, 0);
7257 tree index = TREE_OPERAND (exp, 1);
7258 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7259 tree set_type = TREE_TYPE (set);
7260 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7261 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7262 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7263 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7264 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7265 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7266 rtx setaddr = XEXP (setval, 0);
7267 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7269 rtx diff, quo, rem, addr, bit, result;
7271 /* If domain is empty, answer is no. Likewise if index is constant
7272 and out of bounds. */
7273 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7274 && TREE_CODE (set_low_bound) == INTEGER_CST
7275 && tree_int_cst_lt (set_high_bound, set_low_bound))
7276 || (TREE_CODE (index) == INTEGER_CST
7277 && TREE_CODE (set_low_bound) == INTEGER_CST
7278 && tree_int_cst_lt (index, set_low_bound))
7279 || (TREE_CODE (set_high_bound) == INTEGER_CST
7280 && TREE_CODE (index) == INTEGER_CST
7281 && tree_int_cst_lt (set_high_bound, index))))
7285 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7287 /* If we get here, we have to generate the code for both cases
7288 (in range and out of range). */
7290 op0 = gen_label_rtx ();
7291 op1 = gen_label_rtx ();
7293 if (! (GET_CODE (index_val) == CONST_INT
7294 && GET_CODE (lo_r) == CONST_INT))
7296 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7297 GET_MODE (index_val), iunsignedp, 0, op1);
7300 if (! (GET_CODE (index_val) == CONST_INT
7301 && GET_CODE (hi_r) == CONST_INT))
7303 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7304 GET_MODE (index_val), iunsignedp, 0, op1);
7307 /* Calculate the element number of bit zero in the first word
7309 if (GET_CODE (lo_r) == CONST_INT)
7310 rlow = GEN_INT (INTVAL (lo_r)
7311 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7313 rlow = expand_binop (index_mode, and_optab, lo_r,
7314 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7315 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7317 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7318 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7320 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7321 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7322 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7323 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7325 addr = memory_address (byte_mode,
7326 expand_binop (index_mode, add_optab, diff,
7327 setaddr, NULL_RTX, iunsignedp,
7330 /* Extract the bit we want to examine. */
7331 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7332 gen_rtx_MEM (byte_mode, addr),
7333 make_tree (TREE_TYPE (index), rem),
7335 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7336 GET_MODE (target) == byte_mode ? target : 0,
7337 1, OPTAB_LIB_WIDEN);
7339 if (result != target)
7340 convert_move (target, result, 1);
7342 /* Output the code to handle the out-of-range case. */
7345 emit_move_insn (target, const0_rtx);
7350 case WITH_CLEANUP_EXPR:
7351 if (RTL_EXPR_RTL (exp) == 0)
7354 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7355 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7357 /* That's it for this cleanup. */
7358 TREE_OPERAND (exp, 2) = 0;
7360 return RTL_EXPR_RTL (exp);
7362 case CLEANUP_POINT_EXPR:
7364 /* Start a new binding layer that will keep track of all cleanup
7365 actions to be performed. */
7366 expand_start_bindings (2);
7368 target_temp_slot_level = temp_slot_level;
7370 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7371 /* If we're going to use this value, load it up now. */
7373 op0 = force_not_mem (op0);
7374 preserve_temp_slots (op0);
7375 expand_end_bindings (NULL_TREE, 0, 0);
7380 /* Check for a built-in function. */
7381 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7382 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7384 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7386 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7387 == BUILT_IN_FRONTEND)
7388 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7390 return expand_builtin (exp, target, subtarget, tmode, ignore);
7393 return expand_call (exp, target, ignore);
7395 case NON_LVALUE_EXPR:
7398 case REFERENCE_EXPR:
7399 if (TREE_OPERAND (exp, 0) == error_mark_node)
7402 if (TREE_CODE (type) == UNION_TYPE)
7404 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7406 /* If both input and output are BLKmode, this conversion
7407 isn't actually doing anything unless we need to make the
7408 alignment stricter. */
7409 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7410 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7411 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7412 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7416 target = assign_temp (type, 0, 1, 1);
7418 if (GET_CODE (target) == MEM)
7419 /* Store data into beginning of memory target. */
7420 store_expr (TREE_OPERAND (exp, 0),
7421 change_address (target, TYPE_MODE (valtype), 0), 0);
7423 else if (GET_CODE (target) == REG)
7424 /* Store this field into a union of the proper type. */
7425 store_field (target,
7426 MIN ((int_size_in_bytes (TREE_TYPE
7427 (TREE_OPERAND (exp, 0)))
7429 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7430 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7431 VOIDmode, 0, BITS_PER_UNIT,
7432 int_size_in_bytes (type), 0);
7436 /* Return the entire union. */
7440 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7442 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7445 /* If the signedness of the conversion differs and OP0 is
7446 a promoted SUBREG, clear that indication since we now
7447 have to do the proper extension. */
7448 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7449 && GET_CODE (op0) == SUBREG)
7450 SUBREG_PROMOTED_VAR_P (op0) = 0;
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7456 if (GET_MODE (op0) == mode)
7459 /* If OP0 is a constant, just convert it into the proper mode. */
7460 if (CONSTANT_P (op0))
7462 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7463 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7465 if (modifier == EXPAND_INITIALIZER)
7466 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7470 convert_to_mode (mode, op0,
7471 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7473 convert_move (target, op0,
7474 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7478 /* We come here from MINUS_EXPR when the second operand is a
7481 this_optab = ! unsignedp && flag_trapv
7482 && (GET_MODE_CLASS(mode) == MODE_INT)
7483 ? addv_optab : add_optab;
7485 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7486 something else, make sure we add the register to the constant and
7487 then to the other thing. This case can occur during strength
7488 reduction and doing it this way will produce better code if the
7489 frame pointer or argument pointer is eliminated.
7491 fold-const.c will ensure that the constant is always in the inner
7492 PLUS_EXPR, so the only case we need to do anything about is if
7493 sp, ap, or fp is our second argument, in which case we must swap
7494 the innermost first argument and our second argument. */
7496 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7497 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7498 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7499 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7500 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7501 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7503 tree t = TREE_OPERAND (exp, 1);
7505 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7506 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7509 /* If the result is to be ptr_mode and we are adding an integer to
7510 something, we might be forming a constant. So try to use
7511 plus_constant. If it produces a sum and we can't accept it,
7512 use force_operand. This allows P = &ARR[const] to generate
7513 efficient code on machines where a SYMBOL_REF is not a valid
7516 If this is an EXPAND_SUM call, always return the sum. */
7517 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7518 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7520 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7521 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7522 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7526 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7528 /* Use immed_double_const to ensure that the constant is
7529 truncated according to the mode of OP1, then sign extended
7530 to a HOST_WIDE_INT. Using the constant directly can result
7531 in non-canonical RTL in a 64x32 cross compile. */
7533 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7535 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7536 op1 = plus_constant (op1, INTVAL (constant_part));
7537 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7538 op1 = force_operand (op1, target);
7542 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7543 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7544 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7550 if (! CONSTANT_P (op0))
7552 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7553 VOIDmode, modifier);
7554 /* Don't go to both_summands if modifier
7555 says it's not right to return a PLUS. */
7556 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7560 /* Use immed_double_const to ensure that the constant is
7561 truncated according to the mode of OP1, then sign extended
7562 to a HOST_WIDE_INT. Using the constant directly can result
7563 in non-canonical RTL in a 64x32 cross compile. */
7565 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7567 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7568 op0 = plus_constant (op0, INTVAL (constant_part));
7569 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7570 op0 = force_operand (op0, target);
7575 /* No sense saving up arithmetic to be done
7576 if it's all in the wrong mode to form part of an address.
7577 And force_operand won't know whether to sign-extend or
7579 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7580 || mode != ptr_mode)
7583 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7587 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7590 /* Make sure any term that's a sum with a constant comes last. */
7591 if (GET_CODE (op0) == PLUS
7592 && CONSTANT_P (XEXP (op0, 1)))
7598 /* If adding to a sum including a constant,
7599 associate it to put the constant outside. */
7600 if (GET_CODE (op1) == PLUS
7601 && CONSTANT_P (XEXP (op1, 1)))
7603 rtx constant_term = const0_rtx;
7605 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7608 /* Ensure that MULT comes first if there is one. */
7609 else if (GET_CODE (op0) == MULT)
7610 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7612 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7614 /* Let's also eliminate constants from op0 if possible. */
7615 op0 = eliminate_constant_term (op0, &constant_term);
7617 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7618 their sum should be a constant. Form it into OP1, since the
7619 result we want will then be OP0 + OP1. */
7621 temp = simplify_binary_operation (PLUS, mode, constant_term,
7626 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7629 /* Put a constant term last and put a multiplication first. */
7630 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7631 temp = op1, op1 = op0, op0 = temp;
7633 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7634 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7637 /* For initializers, we are allowed to return a MINUS of two
7638 symbolic constants. Here we handle all cases when both operands
7640 /* Handle difference of two symbolic constants,
7641 for the sake of an initializer. */
7642 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7643 && really_constant_p (TREE_OPERAND (exp, 0))
7644 && really_constant_p (TREE_OPERAND (exp, 1)))
7646 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7647 VOIDmode, ro_modifier);
7648 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7649 VOIDmode, ro_modifier);
7651 /* If the last operand is a CONST_INT, use plus_constant of
7652 the negated constant. Else make the MINUS. */
7653 if (GET_CODE (op1) == CONST_INT)
7654 return plus_constant (op0, - INTVAL (op1));
7656 return gen_rtx_MINUS (mode, op0, op1);
7658 /* Convert A - const to A + (-const). */
7659 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7661 tree negated = fold (build1 (NEGATE_EXPR, type,
7662 TREE_OPERAND (exp, 1)));
7664 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7665 /* If we can't negate the constant in TYPE, leave it alone and
7666 expand_binop will negate it for us. We used to try to do it
7667 here in the signed version of TYPE, but that doesn't work
7668 on POINTER_TYPEs. */;
7671 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7675 this_optab = ! unsignedp && flag_trapv
7676 && (GET_MODE_CLASS(mode) == MODE_INT)
7677 ? subv_optab : sub_optab;
7681 /* If first operand is constant, swap them.
7682 Thus the following special case checks need only
7683 check the second operand. */
7684 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7686 register tree t1 = TREE_OPERAND (exp, 0);
7687 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7688 TREE_OPERAND (exp, 1) = t1;
7691 /* Attempt to return something suitable for generating an
7692 indexed address, for machines that support that. */
7694 if (modifier == EXPAND_SUM && mode == ptr_mode
7695 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7696 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7698 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7701 /* Apply distributive law if OP0 is x+c. */
7702 if (GET_CODE (op0) == PLUS
7703 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7708 (mode, XEXP (op0, 0),
7709 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7710 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7711 * INTVAL (XEXP (op0, 1))));
7713 if (GET_CODE (op0) != REG)
7714 op0 = force_operand (op0, NULL_RTX);
7715 if (GET_CODE (op0) != REG)
7716 op0 = copy_to_mode_reg (mode, op0);
7719 gen_rtx_MULT (mode, op0,
7720 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7723 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7726 /* Check for multiplying things that have been extended
7727 from a narrower type. If this machine supports multiplying
7728 in that narrower type with a result in the desired type,
7729 do it that way, and avoid the explicit type-conversion. */
7730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7731 && TREE_CODE (type) == INTEGER_TYPE
7732 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7733 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7734 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7735 && int_fits_type_p (TREE_OPERAND (exp, 1),
7736 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7737 /* Don't use a widening multiply if a shift will do. */
7738 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7739 > HOST_BITS_PER_WIDE_INT)
7740 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7742 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7743 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7745 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7746 /* If both operands are extended, they must either both
7747 be zero-extended or both be sign-extended. */
7748 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7750 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7752 enum machine_mode innermode
7753 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7754 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7755 ? smul_widen_optab : umul_widen_optab);
7756 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7757 ? umul_widen_optab : smul_widen_optab);
7758 if (mode == GET_MODE_WIDER_MODE (innermode))
7760 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7762 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7763 NULL_RTX, VOIDmode, 0);
7764 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7765 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7768 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7769 NULL_RTX, VOIDmode, 0);
7772 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7773 && innermode == word_mode)
7776 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7777 NULL_RTX, VOIDmode, 0);
7778 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7779 op1 = convert_modes (innermode, mode,
7780 expand_expr (TREE_OPERAND (exp, 1),
7781 NULL_RTX, VOIDmode, 0),
7784 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7785 NULL_RTX, VOIDmode, 0);
7786 temp = expand_binop (mode, other_optab, op0, op1, target,
7787 unsignedp, OPTAB_LIB_WIDEN);
7788 htem = expand_mult_highpart_adjust (innermode,
7789 gen_highpart (innermode, temp),
7791 gen_highpart (innermode, temp),
7793 emit_move_insn (gen_highpart (innermode, temp), htem);
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7799 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7800 return expand_mult (mode, op0, op1, target, unsignedp);
7802 case TRUNC_DIV_EXPR:
7803 case FLOOR_DIV_EXPR:
7805 case ROUND_DIV_EXPR:
7806 case EXACT_DIV_EXPR:
7807 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7809 /* Possible optimization: compute the dividend with EXPAND_SUM
7810 then if the divisor is constant can optimize the case
7811 where some terms of the dividend have coeffs divisible by it. */
7812 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7813 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7814 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7817 this_optab = flodiv_optab;
7820 case TRUNC_MOD_EXPR:
7821 case FLOOR_MOD_EXPR:
7823 case ROUND_MOD_EXPR:
7824 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7826 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7827 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7828 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7830 case FIX_ROUND_EXPR:
7831 case FIX_FLOOR_EXPR:
7833 abort (); /* Not used for C. */
7835 case FIX_TRUNC_EXPR:
7836 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7838 target = gen_reg_rtx (mode);
7839 expand_fix (target, op0, unsignedp);
7843 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7845 target = gen_reg_rtx (mode);
7846 /* expand_float can't figure out what to do if FROM has VOIDmode.
7847 So give it the correct mode. With -O, cse will optimize this. */
7848 if (GET_MODE (op0) == VOIDmode)
7849 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7851 expand_float (target, op0,
7852 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7856 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7857 temp = expand_unop (mode,
7858 ! unsignedp && flag_trapv
7859 && (GET_MODE_CLASS(mode) == MODE_INT)
7860 ? negv_optab : neg_optab, op0, target, 0);
7866 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7868 /* Handle complex values specially. */
7869 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7870 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7871 return expand_complex_abs (mode, op0, target, unsignedp);
7873 /* Unsigned abs is simply the operand. Testing here means we don't
7874 risk generating incorrect code below. */
7875 if (TREE_UNSIGNED (type))
7878 return expand_abs (mode, op0, target, unsignedp,
7879 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7883 target = original_target;
7884 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7885 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7886 || GET_MODE (target) != mode
7887 || (GET_CODE (target) == REG
7888 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7889 target = gen_reg_rtx (mode);
7890 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7891 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7893 /* First try to do it with a special MIN or MAX instruction.
7894 If that does not win, use a conditional jump to select the proper
7896 this_optab = (TREE_UNSIGNED (type)
7897 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7898 : (code == MIN_EXPR ? smin_optab : smax_optab));
7900 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7905 /* At this point, a MEM target is no longer useful; we will get better
7908 if (GET_CODE (target) == MEM)
7909 target = gen_reg_rtx (mode);
7912 emit_move_insn (target, op0);
7914 op0 = gen_label_rtx ();
7916 /* If this mode is an integer too wide to compare properly,
7917 compare word by word. Rely on cse to optimize constant cases. */
7918 if (GET_MODE_CLASS (mode) == MODE_INT
7919 && ! can_compare_p (GE, mode, ccp_jump))
7921 if (code == MAX_EXPR)
7922 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7923 target, op1, NULL_RTX, op0);
7925 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7926 op1, target, NULL_RTX, op0);
7930 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7931 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7932 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7935 emit_move_insn (target, op1);
7940 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7941 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7947 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7948 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7953 /* ??? Can optimize bitwise operations with one arg constant.
7954 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7955 and (a bitwise1 b) bitwise2 b (etc)
7956 but that is probably not worth while. */
7958 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7959 boolean values when we want in all cases to compute both of them. In
7960 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7961 as actual zero-or-1 values and then bitwise anding. In cases where
7962 there cannot be any side effects, better code would be made by
7963 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7964 how to recognize those cases. */
7966 case TRUTH_AND_EXPR:
7968 this_optab = and_optab;
7973 this_optab = ior_optab;
7976 case TRUTH_XOR_EXPR:
7978 this_optab = xor_optab;
7985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7991 /* Could determine the answer when only additive constants differ. Also,
7992 the addition of one can be handled by changing the condition. */
7999 case UNORDERED_EXPR:
8006 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8010 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8011 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8013 && GET_CODE (original_target) == REG
8014 && (GET_MODE (original_target)
8015 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8017 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8020 if (temp != original_target)
8021 temp = copy_to_reg (temp);
8023 op1 = gen_label_rtx ();
8024 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8025 GET_MODE (temp), unsignedp, 0, op1);
8026 emit_move_insn (temp, const1_rtx);
8031 /* If no set-flag instruction, must generate a conditional
8032 store into a temporary variable. Drop through
8033 and handle this like && and ||. */
8035 case TRUTH_ANDIF_EXPR:
8036 case TRUTH_ORIF_EXPR:
8038 && (target == 0 || ! safe_from_p (target, exp, 1)
8039 /* Make sure we don't have a hard reg (such as function's return
8040 value) live across basic blocks, if not optimizing. */
8041 || (!optimize && GET_CODE (target) == REG
8042 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8043 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8046 emit_clr_insn (target);
8048 op1 = gen_label_rtx ();
8049 jumpifnot (exp, op1);
8052 emit_0_to_1_insn (target);
8055 return ignore ? const0_rtx : target;
8057 case TRUTH_NOT_EXPR:
8058 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8059 /* The parser is careful to generate TRUTH_NOT_EXPR
8060 only with operands that are always zero or one. */
8061 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8062 target, 1, OPTAB_LIB_WIDEN);
8068 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8070 return expand_expr (TREE_OPERAND (exp, 1),
8071 (ignore ? const0_rtx : target),
8075 /* If we would have a "singleton" (see below) were it not for a
8076 conversion in each arm, bring that conversion back out. */
8077 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8078 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8079 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8080 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8082 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8083 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8085 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8086 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8087 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8088 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8089 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8090 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8091 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8092 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8093 return expand_expr (build1 (NOP_EXPR, type,
8094 build (COND_EXPR, TREE_TYPE (iftrue),
8095 TREE_OPERAND (exp, 0),
8097 target, tmode, modifier);
8101 /* Note that COND_EXPRs whose type is a structure or union
8102 are required to be constructed to contain assignments of
8103 a temporary variable, so that we can evaluate them here
8104 for side effect only. If type is void, we must do likewise. */
8106 /* If an arm of the branch requires a cleanup,
8107 only that cleanup is performed. */
8110 tree binary_op = 0, unary_op = 0;
8112 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8113 convert it to our mode, if necessary. */
8114 if (integer_onep (TREE_OPERAND (exp, 1))
8115 && integer_zerop (TREE_OPERAND (exp, 2))
8116 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8120 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8126 if (GET_MODE (op0) == mode)
8130 target = gen_reg_rtx (mode);
8131 convert_move (target, op0, unsignedp);
8135 /* Check for X ? A + B : A. If we have this, we can copy A to the
8136 output and conditionally add B. Similarly for unary operations.
8137 Don't do this if X has side-effects because those side effects
8138 might affect A or B and the "?" operation is a sequence point in
8139 ANSI. (operand_equal_p tests for side effects.) */
8141 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8142 && operand_equal_p (TREE_OPERAND (exp, 2),
8143 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8144 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8145 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8146 && operand_equal_p (TREE_OPERAND (exp, 1),
8147 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8148 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8149 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8150 && operand_equal_p (TREE_OPERAND (exp, 2),
8151 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8152 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8153 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8154 && operand_equal_p (TREE_OPERAND (exp, 1),
8155 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8156 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8158 /* If we are not to produce a result, we have no target. Otherwise,
8159 if a target was specified use it; it will not be used as an
8160 intermediate target unless it is safe. If no target, use a
8165 else if (original_target
8166 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8167 || (singleton && GET_CODE (original_target) == REG
8168 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8169 && original_target == var_rtx (singleton)))
8170 && GET_MODE (original_target) == mode
8171 #ifdef HAVE_conditional_move
8172 && (! can_conditionally_move_p (mode)
8173 || GET_CODE (original_target) == REG
8174 || TREE_ADDRESSABLE (type))
8176 && ! (GET_CODE (original_target) == MEM
8177 && MEM_VOLATILE_P (original_target)))
8178 temp = original_target;
8179 else if (TREE_ADDRESSABLE (type))
8182 temp = assign_temp (type, 0, 0, 1);
8184 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8185 do the test of X as a store-flag operation, do this as
8186 A + ((X != 0) << log C). Similarly for other simple binary
8187 operators. Only do for C == 1 if BRANCH_COST is low. */
8188 if (temp && singleton && binary_op
8189 && (TREE_CODE (binary_op) == PLUS_EXPR
8190 || TREE_CODE (binary_op) == MINUS_EXPR
8191 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8192 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8193 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8194 : integer_onep (TREE_OPERAND (binary_op, 1)))
8195 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8198 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8199 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8200 ? addv_optab : add_optab)
8201 : TREE_CODE (binary_op) == MINUS_EXPR
8202 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8203 ? subv_optab : sub_optab)
8204 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8207 /* If we had X ? A : A + 1, do this as A + (X == 0).
8209 We have to invert the truth value here and then put it
8210 back later if do_store_flag fails. We cannot simply copy
8211 TREE_OPERAND (exp, 0) to another variable and modify that
8212 because invert_truthvalue can modify the tree pointed to
8214 if (singleton == TREE_OPERAND (exp, 1))
8215 TREE_OPERAND (exp, 0)
8216 = invert_truthvalue (TREE_OPERAND (exp, 0));
8218 result = do_store_flag (TREE_OPERAND (exp, 0),
8219 (safe_from_p (temp, singleton, 1)
8221 mode, BRANCH_COST <= 1);
8223 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8224 result = expand_shift (LSHIFT_EXPR, mode, result,
8225 build_int_2 (tree_log2
8229 (safe_from_p (temp, singleton, 1)
8230 ? temp : NULL_RTX), 0);
8234 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8235 return expand_binop (mode, boptab, op1, result, temp,
8236 unsignedp, OPTAB_LIB_WIDEN);
8238 else if (singleton == TREE_OPERAND (exp, 1))
8239 TREE_OPERAND (exp, 0)
8240 = invert_truthvalue (TREE_OPERAND (exp, 0));
8243 do_pending_stack_adjust ();
8245 op0 = gen_label_rtx ();
8247 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8251 /* If the target conflicts with the other operand of the
8252 binary op, we can't use it. Also, we can't use the target
8253 if it is a hard register, because evaluating the condition
8254 might clobber it. */
8256 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8257 || (GET_CODE (temp) == REG
8258 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8259 temp = gen_reg_rtx (mode);
8260 store_expr (singleton, temp, 0);
8263 expand_expr (singleton,
8264 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8265 if (singleton == TREE_OPERAND (exp, 1))
8266 jumpif (TREE_OPERAND (exp, 0), op0);
8268 jumpifnot (TREE_OPERAND (exp, 0), op0);
8270 start_cleanup_deferral ();
8271 if (binary_op && temp == 0)
8272 /* Just touch the other operand. */
8273 expand_expr (TREE_OPERAND (binary_op, 1),
8274 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8276 store_expr (build (TREE_CODE (binary_op), type,
8277 make_tree (type, temp),
8278 TREE_OPERAND (binary_op, 1)),
8281 store_expr (build1 (TREE_CODE (unary_op), type,
8282 make_tree (type, temp)),
8286 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8287 comparison operator. If we have one of these cases, set the
8288 output to A, branch on A (cse will merge these two references),
8289 then set the output to FOO. */
8291 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8292 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8293 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8294 TREE_OPERAND (exp, 1), 0)
8295 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8296 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8297 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8299 if (GET_CODE (temp) == REG
8300 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8301 temp = gen_reg_rtx (mode);
8302 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8303 jumpif (TREE_OPERAND (exp, 0), op0);
8305 start_cleanup_deferral ();
8306 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8310 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8311 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8312 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8313 TREE_OPERAND (exp, 2), 0)
8314 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8315 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8316 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8318 if (GET_CODE (temp) == REG
8319 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8320 temp = gen_reg_rtx (mode);
8321 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8322 jumpifnot (TREE_OPERAND (exp, 0), op0);
8324 start_cleanup_deferral ();
8325 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8330 op1 = gen_label_rtx ();
8331 jumpifnot (TREE_OPERAND (exp, 0), op0);
8333 start_cleanup_deferral ();
8335 /* One branch of the cond can be void, if it never returns. For
8336 example A ? throw : E */
8338 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8339 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8341 expand_expr (TREE_OPERAND (exp, 1),
8342 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8343 end_cleanup_deferral ();
8345 emit_jump_insn (gen_jump (op1));
8348 start_cleanup_deferral ();
8350 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8351 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8353 expand_expr (TREE_OPERAND (exp, 2),
8354 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8357 end_cleanup_deferral ();
8368 /* Something needs to be initialized, but we didn't know
8369 where that thing was when building the tree. For example,
8370 it could be the return value of a function, or a parameter
8371 to a function which lays down in the stack, or a temporary
8372 variable which must be passed by reference.
8374 We guarantee that the expression will either be constructed
8375 or copied into our original target. */
8377 tree slot = TREE_OPERAND (exp, 0);
8378 tree cleanups = NULL_TREE;
8381 if (TREE_CODE (slot) != VAR_DECL)
8385 target = original_target;
8387 /* Set this here so that if we get a target that refers to a
8388 register variable that's already been used, put_reg_into_stack
8389 knows that it should fix up those uses. */
8390 TREE_USED (slot) = 1;
8394 if (DECL_RTL_SET_P (slot))
8396 target = DECL_RTL (slot);
8397 /* If we have already expanded the slot, so don't do
8399 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8404 target = assign_temp (type, 2, 0, 1);
8405 /* All temp slots at this level must not conflict. */
8406 preserve_temp_slots (target);
8407 SET_DECL_RTL (slot, target);
8408 if (TREE_ADDRESSABLE (slot))
8409 put_var_into_stack (slot);
8411 /* Since SLOT is not known to the called function
8412 to belong to its stack frame, we must build an explicit
8413 cleanup. This case occurs when we must build up a reference
8414 to pass the reference as an argument. In this case,
8415 it is very likely that such a reference need not be
8418 if (TREE_OPERAND (exp, 2) == 0)
8419 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8420 cleanups = TREE_OPERAND (exp, 2);
8425 /* This case does occur, when expanding a parameter which
8426 needs to be constructed on the stack. The target
8427 is the actual stack address that we want to initialize.
8428 The function we call will perform the cleanup in this case. */
8430 /* If we have already assigned it space, use that space,
8431 not target that we were passed in, as our target
8432 parameter is only a hint. */
8433 if (DECL_RTL_SET_P (slot))
8435 target = DECL_RTL (slot);
8436 /* If we have already expanded the slot, so don't do
8438 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8443 SET_DECL_RTL (slot, target);
8444 /* If we must have an addressable slot, then make sure that
8445 the RTL that we just stored in slot is OK. */
8446 if (TREE_ADDRESSABLE (slot))
8447 put_var_into_stack (slot);
8451 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8452 /* Mark it as expanded. */
8453 TREE_OPERAND (exp, 1) = NULL_TREE;
8455 store_expr (exp1, target, 0);
8457 expand_decl_cleanup (NULL_TREE, cleanups);
8464 tree lhs = TREE_OPERAND (exp, 0);
8465 tree rhs = TREE_OPERAND (exp, 1);
8466 tree noncopied_parts = 0;
8467 tree lhs_type = TREE_TYPE (lhs);
8469 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8470 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8471 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8472 TYPE_NONCOPIED_PARTS (lhs_type));
8473 while (noncopied_parts != 0)
8475 expand_assignment (TREE_VALUE (noncopied_parts),
8476 TREE_PURPOSE (noncopied_parts), 0, 0);
8477 noncopied_parts = TREE_CHAIN (noncopied_parts);
8484 /* If lhs is complex, expand calls in rhs before computing it.
8485 That's so we don't compute a pointer and save it over a call.
8486 If lhs is simple, compute it first so we can give it as a
8487 target if the rhs is just a call. This avoids an extra temp and copy
8488 and that prevents a partial-subsumption which makes bad code.
8489 Actually we could treat component_ref's of vars like vars. */
8491 tree lhs = TREE_OPERAND (exp, 0);
8492 tree rhs = TREE_OPERAND (exp, 1);
8493 tree noncopied_parts = 0;
8494 tree lhs_type = TREE_TYPE (lhs);
8498 /* Check for |= or &= of a bitfield of size one into another bitfield
8499 of size 1. In this case, (unless we need the result of the
8500 assignment) we can do this more efficiently with a
8501 test followed by an assignment, if necessary.
8503 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8504 things change so we do, this code should be enhanced to
8507 && TREE_CODE (lhs) == COMPONENT_REF
8508 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8509 || TREE_CODE (rhs) == BIT_AND_EXPR)
8510 && TREE_OPERAND (rhs, 0) == lhs
8511 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8512 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8513 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8515 rtx label = gen_label_rtx ();
8517 do_jump (TREE_OPERAND (rhs, 1),
8518 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8519 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8520 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8521 (TREE_CODE (rhs) == BIT_IOR_EXPR
8523 : integer_zero_node)),
8525 do_pending_stack_adjust ();
8530 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8531 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8532 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8533 TYPE_NONCOPIED_PARTS (lhs_type));
8535 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8536 while (noncopied_parts != 0)
8538 expand_assignment (TREE_PURPOSE (noncopied_parts),
8539 TREE_VALUE (noncopied_parts), 0, 0);
8540 noncopied_parts = TREE_CHAIN (noncopied_parts);
8546 if (!TREE_OPERAND (exp, 0))
8547 expand_null_return ();
8549 expand_return (TREE_OPERAND (exp, 0));
8552 case PREINCREMENT_EXPR:
8553 case PREDECREMENT_EXPR:
8554 return expand_increment (exp, 0, ignore);
8556 case POSTINCREMENT_EXPR:
8557 case POSTDECREMENT_EXPR:
8558 /* Faster to treat as pre-increment if result is not used. */
8559 return expand_increment (exp, ! ignore, ignore);
8562 /* If nonzero, TEMP will be set to the address of something that might
8563 be a MEM corresponding to a stack slot. */
8566 /* Are we taking the address of a nested function? */
8567 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8568 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8569 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8570 && ! TREE_STATIC (exp))
8572 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8573 op0 = force_operand (op0, target);
8575 /* If we are taking the address of something erroneous, just
8577 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8581 /* We make sure to pass const0_rtx down if we came in with
8582 ignore set, to avoid doing the cleanups twice for something. */
8583 op0 = expand_expr (TREE_OPERAND (exp, 0),
8584 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8585 (modifier == EXPAND_INITIALIZER
8586 ? modifier : EXPAND_CONST_ADDRESS));
8588 /* If we are going to ignore the result, OP0 will have been set
8589 to const0_rtx, so just return it. Don't get confused and
8590 think we are taking the address of the constant. */
8594 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8595 clever and returns a REG when given a MEM. */
8596 op0 = protect_from_queue (op0, 1);
8598 /* We would like the object in memory. If it is a constant, we can
8599 have it be statically allocated into memory. For a non-constant,
8600 we need to allocate some memory and store the value into it. */
8602 if (CONSTANT_P (op0))
8603 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8605 else if (GET_CODE (op0) == MEM)
8607 mark_temp_addr_taken (op0);
8608 temp = XEXP (op0, 0);
8611 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8612 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8613 || GET_CODE (op0) == PARALLEL)
8615 /* If this object is in a register, it must be not
8617 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8618 tree nt = build_qualified_type (inner_type,
8619 (TYPE_QUALS (inner_type)
8620 | TYPE_QUAL_CONST));
8621 rtx memloc = assign_temp (nt, 1, 1, 1);
8623 mark_temp_addr_taken (memloc);
8624 if (GET_CODE (op0) == PARALLEL)
8625 /* Handle calls that pass values in multiple non-contiguous
8626 locations. The Irix 6 ABI has examples of this. */
8627 emit_group_store (memloc, op0,
8628 int_size_in_bytes (inner_type),
8629 TYPE_ALIGN (inner_type));
8631 emit_move_insn (memloc, op0);
8635 if (GET_CODE (op0) != MEM)
8638 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8640 temp = XEXP (op0, 0);
8641 #ifdef POINTERS_EXTEND_UNSIGNED
8642 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8643 && mode == ptr_mode)
8644 temp = convert_memory_address (ptr_mode, temp);
8649 op0 = force_operand (XEXP (op0, 0), target);
8652 if (flag_force_addr && GET_CODE (op0) != REG)
8653 op0 = force_reg (Pmode, op0);
8655 if (GET_CODE (op0) == REG
8656 && ! REG_USERVAR_P (op0))
8657 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8659 /* If we might have had a temp slot, add an equivalent address
8662 update_temp_slot_address (temp, op0);
8664 #ifdef POINTERS_EXTEND_UNSIGNED
8665 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8666 && mode == ptr_mode)
8667 op0 = convert_memory_address (ptr_mode, op0);
8672 case ENTRY_VALUE_EXPR:
8675 /* COMPLEX type for Extended Pascal & Fortran */
8678 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8681 /* Get the rtx code of the operands. */
8682 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8683 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8686 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8690 /* Move the real (op0) and imaginary (op1) parts to their location. */
8691 emit_move_insn (gen_realpart (mode, target), op0);
8692 emit_move_insn (gen_imagpart (mode, target), op1);
8694 insns = get_insns ();
8697 /* Complex construction should appear as a single unit. */
8698 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8699 each with a separate pseudo as destination.
8700 It's not correct for flow to treat them as a unit. */
8701 if (GET_CODE (target) != CONCAT)
8702 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8710 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8711 return gen_realpart (mode, op0);
8714 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8715 return gen_imagpart (mode, op0);
8719 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8726 target = gen_reg_rtx (mode);
8730 /* Store the realpart and the negated imagpart to target. */
8731 emit_move_insn (gen_realpart (partmode, target),
8732 gen_realpart (partmode, op0));
8734 imag_t = gen_imagpart (partmode, target);
8735 temp = expand_unop (partmode,
8736 ! unsignedp && flag_trapv
8737 && (GET_MODE_CLASS(partmode) == MODE_INT)
8738 ? negv_optab : neg_optab,
8739 gen_imagpart (partmode, op0), imag_t, 0);
8741 emit_move_insn (imag_t, temp);
8743 insns = get_insns ();
8746 /* Conjugate should appear as a single unit
8747 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8748 each with a separate pseudo as destination.
8749 It's not correct for flow to treat them as a unit. */
8750 if (GET_CODE (target) != CONCAT)
8751 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8758 case TRY_CATCH_EXPR:
8760 tree handler = TREE_OPERAND (exp, 1);
8762 expand_eh_region_start ();
8764 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 expand_eh_region_end_cleanup (handler);
8771 case TRY_FINALLY_EXPR:
8773 tree try_block = TREE_OPERAND (exp, 0);
8774 tree finally_block = TREE_OPERAND (exp, 1);
8775 rtx finally_label = gen_label_rtx ();
8776 rtx done_label = gen_label_rtx ();
8777 rtx return_link = gen_reg_rtx (Pmode);
8778 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8779 (tree) finally_label, (tree) return_link);
8780 TREE_SIDE_EFFECTS (cleanup) = 1;
8782 /* Start a new binding layer that will keep track of all cleanup
8783 actions to be performed. */
8784 expand_start_bindings (2);
8786 target_temp_slot_level = temp_slot_level;
8788 expand_decl_cleanup (NULL_TREE, cleanup);
8789 op0 = expand_expr (try_block, target, tmode, modifier);
8791 preserve_temp_slots (op0);
8792 expand_end_bindings (NULL_TREE, 0, 0);
8793 emit_jump (done_label);
8794 emit_label (finally_label);
8795 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8796 emit_indirect_jump (return_link);
8797 emit_label (done_label);
8801 case GOTO_SUBROUTINE_EXPR:
8803 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8804 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8805 rtx return_address = gen_label_rtx ();
8806 emit_move_insn (return_link,
8807 gen_rtx_LABEL_REF (Pmode, return_address));
8809 emit_label (return_address);
8814 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8817 return get_exception_pointer ();
8820 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8823 /* Here to do an ordinary binary operator, generating an instruction
8824 from the optab already placed in `this_optab'. */
8826 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8829 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8831 temp = expand_binop (mode, this_optab, op0, op1, target,
8832 unsignedp, OPTAB_LIB_WIDEN);
8838 /* Similar to expand_expr, except that we don't specify a target, target
8839 mode, or modifier and we return the alignment of the inner type. This is
8840 used in cases where it is not necessary to align the result to the
8841 alignment of its type as long as we know the alignment of the result, for
8842 example for comparisons of BLKmode values. */
8845 expand_expr_unaligned (exp, palign)
8847 unsigned int *palign;
8850 tree type = TREE_TYPE (exp);
8851 register enum machine_mode mode = TYPE_MODE (type);
8853 /* Default the alignment we return to that of the type. */
8854 *palign = TYPE_ALIGN (type);
8856 /* The only cases in which we do anything special is if the resulting mode
8858 if (mode != BLKmode)
8859 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8861 switch (TREE_CODE (exp))
8865 case NON_LVALUE_EXPR:
8866 /* Conversions between BLKmode values don't change the underlying
8867 alignment or value. */
8868 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8869 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8873 /* Much of the code for this case is copied directly from expand_expr.
8874 We need to duplicate it here because we will do something different
8875 in the fall-through case, so we need to handle the same exceptions
8878 tree array = TREE_OPERAND (exp, 0);
8879 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8880 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8881 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8884 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8887 /* Optimize the special-case of a zero lower bound.
8889 We convert the low_bound to sizetype to avoid some problems
8890 with constant folding. (E.g. suppose the lower bound is 1,
8891 and its mode is QI. Without the conversion, (ARRAY
8892 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8893 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8895 if (! integer_zerop (low_bound))
8896 index = size_diffop (index, convert (sizetype, low_bound));
8898 /* If this is a constant index into a constant array,
8899 just get the value from the array. Handle both the cases when
8900 we have an explicit constructor and when our operand is a variable
8901 that was declared const. */
8903 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8904 && host_integerp (index, 0)
8905 && 0 > compare_tree_int (index,
8906 list_length (CONSTRUCTOR_ELTS
8907 (TREE_OPERAND (exp, 0)))))
8911 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8912 i = tree_low_cst (index, 0);
8913 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8917 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8920 else if (optimize >= 1
8921 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8922 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8923 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8925 if (TREE_CODE (index) == INTEGER_CST)
8927 tree init = DECL_INITIAL (array);
8929 if (TREE_CODE (init) == CONSTRUCTOR)
8933 for (elem = CONSTRUCTOR_ELTS (init);
8934 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8935 elem = TREE_CHAIN (elem))
8939 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8949 /* If the operand is a CONSTRUCTOR, we can just extract the
8950 appropriate field if it is present. Don't do this if we have
8951 already written the data since we want to refer to that copy
8952 and varasm.c assumes that's what we'll do. */
8953 if (TREE_CODE (exp) != ARRAY_REF
8954 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8955 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8959 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8960 elt = TREE_CHAIN (elt))
8961 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8962 /* Note that unlike the case in expand_expr, we know this is
8963 BLKmode and hence not an integer. */
8964 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8968 enum machine_mode mode1;
8969 HOST_WIDE_INT bitsize, bitpos;
8972 unsigned int alignment;
8974 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8975 &mode1, &unsignedp, &volatilep,
8978 /* If we got back the original object, something is wrong. Perhaps
8979 we are evaluating an expression too early. In any event, don't
8980 infinitely recurse. */
8984 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8986 /* If this is a constant, put it into a register if it is a
8987 legitimate constant and OFFSET is 0 and memory if it isn't. */
8988 if (CONSTANT_P (op0))
8990 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8992 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8994 op0 = force_reg (inner_mode, op0);
8996 op0 = validize_mem (force_const_mem (inner_mode, op0));
9001 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9003 /* If this object is in a register, put it into memory.
9004 This case can't occur in C, but can in Ada if we have
9005 unchecked conversion of an expression from a scalar type to
9006 an array or record type. */
9007 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9008 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9010 tree nt = build_qualified_type (TREE_TYPE (tem),
9011 (TYPE_QUALS (TREE_TYPE (tem))
9012 | TYPE_QUAL_CONST));
9013 rtx memloc = assign_temp (nt, 1, 1, 1);
9015 mark_temp_addr_taken (memloc);
9016 emit_move_insn (memloc, op0);
9020 if (GET_CODE (op0) != MEM)
9023 if (GET_MODE (offset_rtx) != ptr_mode)
9025 #ifdef POINTERS_EXTEND_UNSIGNED
9026 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9028 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9032 op0 = change_address (op0, VOIDmode,
9033 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9034 force_reg (ptr_mode,
9038 /* Don't forget about volatility even if this is a bitfield. */
9039 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9041 op0 = copy_rtx (op0);
9042 MEM_VOLATILE_P (op0) = 1;
9045 /* Check the access. */
9046 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9051 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9052 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9054 /* Check the access right of the pointer. */
9055 in_check_memory_usage = 1;
9056 if (size > BITS_PER_UNIT)
9057 emit_library_call (chkr_check_addr_libfunc,
9058 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9059 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9060 TYPE_MODE (sizetype),
9061 GEN_INT (MEMORY_USE_RO),
9062 TYPE_MODE (integer_type_node));
9063 in_check_memory_usage = 0;
9066 /* In cases where an aligned union has an unaligned object
9067 as a field, we might be extracting a BLKmode value from
9068 an integer-mode (e.g., SImode) object. Handle this case
9069 by doing the extract into an object as wide as the field
9070 (which we know to be the width of a basic mode), then
9071 storing into memory, and changing the mode to BLKmode.
9072 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9073 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9074 if (mode1 == VOIDmode
9075 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9076 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9077 && (TYPE_ALIGN (type) > alignment
9078 || bitpos % TYPE_ALIGN (type) != 0)))
9080 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9082 if (ext_mode == BLKmode)
9084 /* In this case, BITPOS must start at a byte boundary. */
9085 if (GET_CODE (op0) != MEM
9086 || bitpos % BITS_PER_UNIT != 0)
9089 op0 = change_address (op0, VOIDmode,
9090 plus_constant (XEXP (op0, 0),
9091 bitpos / BITS_PER_UNIT));
9095 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9097 rtx new = assign_temp (nt, 0, 1, 1);
9099 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9100 unsignedp, NULL_RTX, ext_mode,
9101 ext_mode, alignment,
9102 int_size_in_bytes (TREE_TYPE (tem)));
9104 /* If the result is a record type and BITSIZE is narrower than
9105 the mode of OP0, an integral mode, and this is a big endian
9106 machine, we must put the field into the high-order bits. */
9107 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9108 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9109 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9110 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9111 size_int (GET_MODE_BITSIZE
9116 emit_move_insn (new, op0);
9117 op0 = copy_rtx (new);
9118 PUT_MODE (op0, BLKmode);
9122 /* Get a reference to just this component. */
9123 op0 = change_address (op0, mode1,
9124 plus_constant (XEXP (op0, 0),
9125 (bitpos / BITS_PER_UNIT)));
9127 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9129 /* Adjust the alignment in case the bit position is not
9130 a multiple of the alignment of the inner object. */
9131 while (bitpos % alignment != 0)
9134 if (GET_CODE (XEXP (op0, 0)) == REG)
9135 mark_reg_pointer (XEXP (op0, 0), alignment);
9137 MEM_IN_STRUCT_P (op0) = 1;
9138 MEM_VOLATILE_P (op0) |= volatilep;
9140 *palign = alignment;
9149 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9152 /* Return the tree node if ARG corresponds to a string constant or zero
9153 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9154 in bytes within the string that ARG is accessing. The type of the
9155 offset will be `sizetype'. */
9158 string_constant (arg, ptr_offset)
     /* Case 1: ARG is directly the address of a STRING_CST; offset is 0.  */
9164 if (TREE_CODE (arg) == ADDR_EXPR
9165 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9167 *ptr_offset = size_zero_node;
9168 return TREE_OPERAND (arg, 0);
     /* Case 2: ARG is &string + offset; the STRING_CST address may appear
        as either operand of the PLUS, so check both orders.  */
9170 else if (TREE_CODE (arg) == PLUS_EXPR)
9172 tree arg0 = TREE_OPERAND (arg, 0);
9173 tree arg1 = TREE_OPERAND (arg, 1);
9178 if (TREE_CODE (arg0) == ADDR_EXPR
9179 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9181 *ptr_offset = convert (sizetype, arg1);
9182 return TREE_OPERAND (arg0, 0);
9184 else if (TREE_CODE (arg1) == ADDR_EXPR
9185 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9187 *ptr_offset = convert (sizetype, arg0);
9188 return TREE_OPERAND (arg1, 0);
9195 /* Expand code for a post- or pre- increment or decrement
9196 and return the RTX for the result.
9197 POST is 1 for postinc/decrements and 0 for preinc/decrements.
     IGNORE is nonzero when the caller discards the result value.  */
9200 expand_increment (exp, post, ignore)
9204 register rtx op0, op1;
9205 register rtx temp, value;
9206 register tree incremented = TREE_OPERAND (exp, 0);
9207 optab this_optab = add_optab;
9209 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9210 int op0_is_copy = 0;
9211 int single_insn = 0;
9212 /* 1 means we can't store into OP0 directly,
9213 because it is a subreg narrower than a word,
9214 and we don't dare clobber the rest of the word. */
9217 /* Stabilize any component ref that might need to be
9218 evaluated more than once below. */
9220 || TREE_CODE (incremented) == BIT_FIELD_REF
9221 || (TREE_CODE (incremented) == COMPONENT_REF
9222 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9223 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9224 incremented = stabilize_reference (incremented);
9225 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9226 ones into save exprs so that they don't accidentally get evaluated
9227 more than once by the code below. */
9228 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9229 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9230 incremented = save_expr (incremented);
9232 /* Compute the operands as RTX.
9233 Note whether OP0 is the actual lvalue or a copy of it:
9234 I believe it is a copy iff it is a register or subreg
9235 and insns were generated in computing it. */
9237 temp = get_last_insn ();
9238 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9240 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9241 in place but instead must do sign- or zero-extension during assignment,
9242 so we copy it into a new register and let the code below use it as
9245 Note that we can safely modify this SUBREG since it is know not to be
9246 shared (it was made by the expand_expr call above). */
9248 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9251 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9255 else if (GET_CODE (op0) == SUBREG
9256 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9258 /* We cannot increment this SUBREG in place. If we are
9259 post-incrementing, get a copy of the old value. Otherwise,
9260 just mark that we cannot increment in place. */
9262 op0 = copy_to_reg (op0);
9267 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9268 && temp != get_last_insn ());
9269 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9270 EXPAND_MEMORY_USE_BAD);
9272 /* Decide whether incrementing or decrementing. */
9273 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9274 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9275 this_optab = sub_optab;
9277 /* Convert decrement by a constant into a negative increment. */
9278 if (this_optab == sub_optab
9279 && GET_CODE (op1) == CONST_INT)
9281 op1 = GEN_INT (-INTVAL (op1));
9282 this_optab = add_optab;
     /* Use the trapping variants for signed types that must trap on
        overflow (-ftrapv semantics).  */
9285 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9286 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9288 /* For a preincrement, see if we can do this with a single instruction. */
9291 icode = (int) this_optab->handlers[(int) mode].insn_code;
9292 if (icode != (int) CODE_FOR_nothing
9293 /* Make sure that OP0 is valid for operands 0 and 1
9294 of the insn we want to queue. */
9295 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9296 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9297 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9301 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9302 then we cannot just increment OP0. We must therefore contrive to
9303 increment the original value. Then, for postincrement, we can return
9304 OP0 since it is a copy of the old value. For preincrement, expand here
9305 unless we can do it with a single insn.
9307 Likewise if storing directly into OP0 would clobber high bits
9308 we need to preserve (bad_subreg). */
9309 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9311 /* This is the easiest way to increment the value wherever it is.
9312 Problems with multiple evaluation of INCREMENTED are prevented
9313 because either (1) it is a component_ref or preincrement,
9314 in which case it was stabilized above, or (2) it is an array_ref
9315 with constant index in an array in a register, which is
9316 safe to reevaluate. */
9317 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9318 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9319 ? MINUS_EXPR : PLUS_EXPR),
9322 TREE_OPERAND (exp, 1));
     /* Strip conversions so the assignment below targets the innermost
        lvalue; re-wrap NEWEXP in the matching conversions as we go.  */
9324 while (TREE_CODE (incremented) == NOP_EXPR
9325 || TREE_CODE (incremented) == CONVERT_EXPR)
9327 newexp = convert (TREE_TYPE (incremented), newexp);
9328 incremented = TREE_OPERAND (incremented, 0);
9331 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9332 return post ? op0 : temp;
9337 /* We have a true reference to the value in OP0.
9338 If there is an insn to add or subtract in this mode, queue it.
9339 Queueing the increment insn avoids the register shuffling
9340 that often results if we must increment now and first save
9341 the old value for subsequent use. */
9343 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9344 op0 = stabilize (op0);
9347 icode = (int) this_optab->handlers[(int) mode].insn_code;
9348 if (icode != (int) CODE_FOR_nothing
9349 /* Make sure that OP0 is valid for operands 0 and 1
9350 of the insn we want to queue. */
9351 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9352 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9354 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9355 op1 = force_reg (mode, op1);
9357 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
     /* MEM operand that the insn predicates rejected: stabilize the
        address, increment in a register, then queue the store back.  */
9359 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9361 rtx addr = (general_operand (XEXP (op0, 0), mode)
9362 ? force_reg (Pmode, XEXP (op0, 0))
9363 : copy_to_reg (XEXP (op0, 0)));
9366 op0 = change_address (op0, VOIDmode, addr);
9367 temp = force_reg (GET_MODE (op0), op0);
9368 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9369 op1 = force_reg (mode, op1);
9371 /* The increment queue is LIFO, thus we have to `queue'
9372 the instructions in reverse order. */
9373 enqueue_insn (op0, gen_move_insn (op0, temp));
9374 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9379 /* Preincrement, or we can't increment with one simple insn. */
9381 /* Save a copy of the value before inc or dec, to return it later. */
9382 temp = value = copy_to_reg (op0);
9384 /* Arrange to return the incremented value. */
9385 /* Copy the rtx because expand_binop will protect from the queue,
9386 and the results of that would be invalid for us to return
9387 if our caller does emit_queue before using our result. */
9388 temp = copy_rtx (value = op0);
9390 /* Increment however we can. */
9391 op1 = expand_binop (mode, this_optab, value, op1,
9392 current_function_check_memory_usage ? NULL_RTX : op0,
9393 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN)
9394 /* Make sure the value is stored into OP0. */
9396 emit_move_insn (op0, op1);
9401 /* At the start of a function, record that we have no previously-pushed
9402 arguments waiting to be popped.  Resets the file-scope
9403 pending_stack_adjust accumulator used by the functions below. */
9405 init_pending_stack_adjust ()
9407 pending_stack_adjust = 0;
9410 /* When exiting from function, if safe, clear out any pending stack adjust
9411 so the adjustment won't get done.
9413 Note, if the current function calls alloca, then it must have a
9414 frame pointer regardless of the value of flag_omit_frame_pointer. */
9417 clear_pending_stack_adjust ()
9419 #ifdef EXIT_IGNORE_STACK
     /* Only skip the adjustment when the epilogue ignores the stack pointer
        and this function cannot be inlined (inlining would splice the
        un-adjusted body into a caller that does care).  */
9421 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9422 && EXIT_IGNORE_STACK
9423 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9424 && ! flag_inline_functions)
     /* Keep stack_pointer_delta consistent with the dropped adjustment.  */
9426 stack_pointer_delta -= pending_stack_adjust,
9427 pending_stack_adjust = 0;
9432 /* Pop any previously-pushed arguments that have not been popped yet.
     Emits a single adjust_stack for the accumulated amount, unless
     inhibit_defer_pop says adjustments must not be deferred/flushed now. */
9435 do_pending_stack_adjust ()
9437 if (inhibit_defer_pop == 0)
9439 if (pending_stack_adjust != 0)
9440 adjust_stack (GEN_INT (pending_stack_adjust));
9441 pending_stack_adjust = 0;
9445 /* Expand conditional expressions. */
9447 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9448 LABEL is an rtx of code CODE_LABEL, in this function and all the
     containing functions (thin wrapper around do_jump with LABEL as the
     false target). */
9452 jumpifnot (exp, label)
9456 do_jump (exp, label, NULL_RTX);
9459 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9466 do_jump (exp, NULL_RTX, label);
9469 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9470 the result is zero, or IF_TRUE_LABEL if the result is one.
9471 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9472 meaning fall through in that case.
9474 do_jump always does any pending stack adjust except when it does not
9475 actually perform a jump. An example where there is no jump
9476 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9478 This function is responsible for optimizing cases such as
9479 &&, || and comparison operators in EXP. */
9482 do_jump (exp, if_false_label, if_true_label)
9484 rtx if_false_label, if_true_label;
9486 register enum tree_code code = TREE_CODE (exp);
9487 /* Some cases need to create a label to jump to
9488 in order to properly fall through.
9489 These cases set DROP_THROUGH_LABEL nonzero. */
9490 rtx drop_through_label = 0;
9494 enum machine_mode mode;
9496 #ifdef MAX_INTEGER_COMPUTATION_MODE
9497 check_max_integer_computation_mode (exp);
     /* Integer constant: the branch direction is known at compile time.  */
9508 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9514 /* This is not true with #pragma weak */
9516 /* The address of something can never be zero. */
9518 emit_jump (if_true_label);
9523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9524 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9525 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9528 /* If we are narrowing the operand, we have to do the compare in the
9530 if ((TYPE_PRECISION (TREE_TYPE (exp))
9531 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9533 case NON_LVALUE_EXPR:
9534 case REFERENCE_EXPR:
9539 /* These cannot change zero->non-zero or vice versa. */
9540 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9543 case WITH_RECORD_EXPR:
9544 /* Put the object on the placeholder list, recurse through our first
9545 operand, and pop the list. */
9546 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9548 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9549 placeholder_list = TREE_CHAIN (placeholder_list);
9553 /* This is never less insns than evaluating the PLUS_EXPR followed by
9554 a test and can be longer if the test is eliminated. */
9556 /* Reduce to minus. */
9557 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9558 TREE_OPERAND (exp, 0),
9559 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9560 TREE_OPERAND (exp, 1))));
9561 /* Process as MINUS. */
9565 /* Non-zero iff operands of minus differ. */
9566 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9567 TREE_OPERAND (exp, 0),
9568 TREE_OPERAND (exp, 1)),
9569 NE, NE, if_false_label, if_true_label);
9573 /* If we are AND'ing with a small constant, do this comparison in the
9574 smallest type that fits. If the machine doesn't have comparisons
9575 that small, it will be converted back to the wider comparison.
9576 This helps if we are testing the sign bit of a narrower object.
9577 combine can't do this for us because it can't know whether a
9578 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9580 if (! SLOW_BYTE_ACCESS
9581 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9582 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9583 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9584 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9585 && (type = type_for_mode (mode, 1)) != 0
9586 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9587 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9588 != CODE_FOR_nothing))
9590 do_jump (convert (type, exp), if_false_label, if_true_label);
9595 case TRUTH_NOT_EXPR:
     /* Logical NOT: just swap the two target labels.  */
9596 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9599 case TRUTH_ANDIF_EXPR:
     /* Short-circuit &&: first operand false jumps straight out.  */
9600 if (if_false_label == 0)
9601 if_false_label = drop_through_label = gen_label_rtx ();
9602 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9603 start_cleanup_deferral ();
9604 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9605 end_cleanup_deferral ();
9608 case TRUTH_ORIF_EXPR:
     /* Short-circuit ||: first operand true jumps straight out.  */
9609 if (if_true_label == 0)
9610 if_true_label = drop_through_label = gen_label_rtx ();
9611 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9612 start_cleanup_deferral ();
9613 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9614 end_cleanup_deferral ();
     /* Comma expression: evaluate the first operand for side effects only,
        then branch on the second.  */
9619 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9620 preserve_temp_slots (NULL_RTX);
9624 do_pending_stack_adjust ();
9625 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9632 HOST_WIDE_INT bitsize, bitpos;
9634 enum machine_mode mode;
9638 unsigned int alignment;
9640 /* Get description of this reference. We don't actually care
9641 about the underlying object here. */
9642 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9643 &unsignedp, &volatilep, &alignment);
9645 type = type_for_size (bitsize, unsignedp);
9646 if (! SLOW_BYTE_ACCESS
9647 && type != 0 && bitsize >= 0
9648 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9649 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9650 != CODE_FOR_nothing))
9652 do_jump (convert (type, exp), if_false_label, if_true_label);
9659 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9660 if (integer_onep (TREE_OPERAND (exp, 1))
9661 && integer_zerop (TREE_OPERAND (exp, 2)))
9662 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9664 else if (integer_zerop (TREE_OPERAND (exp, 1))
9665 && integer_onep (TREE_OPERAND (exp, 2)))
9666 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
     /* General COND_EXPR: branch on the condition, then emit both arms.  */
9670 register rtx label1 = gen_label_rtx ();
9671 drop_through_label = gen_label_rtx ();
9673 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9675 start_cleanup_deferral ();
9676 /* Now the THEN-expression. */
9677 do_jump (TREE_OPERAND (exp, 1),
9678 if_false_label ? if_false_label : drop_through_label,
9679 if_true_label ? if_true_label : drop_through_label);
9680 /* In case the do_jump just above never jumps. */
9681 do_pending_stack_adjust ();
9682 emit_label (label1);
9684 /* Now the ELSE-expression. */
9685 do_jump (TREE_OPERAND (exp, 2),
9686 if_false_label ? if_false_label : drop_through_label,
9687 if_true_label ? if_true_label : drop_through_label);
9688 end_cleanup_deferral ();
     /* EQ_EXPR: complex operands decompose into real and imaginary
        part comparisons joined with &&.  */
9694 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9696 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9697 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9699 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9700 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9703 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9704 fold (build (EQ_EXPR, TREE_TYPE (exp),
9705 fold (build1 (REALPART_EXPR,
9706 TREE_TYPE (inner_type),
9708 fold (build1 (REALPART_EXPR,
9709 TREE_TYPE (inner_type),
9711 fold (build (EQ_EXPR, TREE_TYPE (exp),
9712 fold (build1 (IMAGPART_EXPR,
9713 TREE_TYPE (inner_type),
9715 fold (build1 (IMAGPART_EXPR,
9716 TREE_TYPE (inner_type),
9718 if_false_label, if_true_label);
9721 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9722 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9724 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9725 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9726 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9728 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
     /* NE_EXPR: the complex case uses || of part inequalities.  */
9734 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9736 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9737 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9739 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9740 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9743 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9744 fold (build (NE_EXPR, TREE_TYPE (exp),
9745 fold (build1 (REALPART_EXPR,
9746 TREE_TYPE (inner_type),
9748 fold (build1 (REALPART_EXPR,
9749 TREE_TYPE (inner_type),
9751 fold (build (NE_EXPR, TREE_TYPE (exp),
9752 fold (build1 (IMAGPART_EXPR,
9753 TREE_TYPE (inner_type),
9755 fold (build1 (IMAGPART_EXPR,
9756 TREE_TYPE (inner_type),
9758 if_false_label, if_true_label);
9761 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9762 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9764 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9765 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9766 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9768 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
     /* Ordering comparisons: fall back to word-at-a-time compares when
        the machine cannot compare in this integer mode directly.  */
9773 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9774 if (GET_MODE_CLASS (mode) == MODE_INT
9775 && ! can_compare_p (LT, mode, ccp_jump))
9776 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9778 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9782 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9783 if (GET_MODE_CLASS (mode) == MODE_INT
9784 && ! can_compare_p (LE, mode, ccp_jump))
9785 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9787 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9791 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9792 if (GET_MODE_CLASS (mode) == MODE_INT
9793 && ! can_compare_p (GT, mode, ccp_jump))
9794 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9796 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9800 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9801 if (GET_MODE_CLASS (mode) == MODE_INT
9802 && ! can_compare_p (GE, mode, ccp_jump))
9803 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9805 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9808 case UNORDERED_EXPR:
9811 enum rtx_code cmp, rcmp;
9814 if (code == UNORDERED_EXPR)
9815 cmp = UNORDERED, rcmp = ORDERED;
9817 cmp = ORDERED, rcmp = UNORDERED;
9818 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9821 if (! can_compare_p (cmp, mode, ccp_jump)
9822 && (can_compare_p (rcmp, mode, ccp_jump)
9823 /* If the target doesn't provide either UNORDERED or ORDERED
9824 comparisons, canonicalize on UNORDERED for the library. */
9825 || rcmp == UNORDERED))
9829 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
     /* Reversed comparison: swap the labels to get the same meaning.  */
9831 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9836 enum rtx_code rcode1;
9837 enum tree_code tcode2;
9861 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9862 if (can_compare_p (rcode1, mode, ccp_jump))
9863 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9867 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9868 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9871 /* If the target doesn't support combined unordered
9872 compares, decompose into UNORDERED + comparison. */
9873 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9874 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9875 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9876 do_jump (exp, if_false_label, if_true_label);
     /* Default case: compute EXP into a register and compare against 0.  */
9883 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9885 /* This is not needed any more and causes poor code since it causes
9886 comparisons and tests from non-SI objects to have different code
9888 /* Copy to register to avoid generating bad insns by cse
9889 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9890 if (!cse_not_expected && GET_CODE (temp) == MEM)
9891 temp = copy_to_reg (temp);
9893 do_pending_stack_adjust ();
9894 /* Do any postincrements in the expression that was tested. */
9897 if (GET_CODE (temp) == CONST_INT
9898 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9899 || GET_CODE (temp) == LABEL_REF)
9901 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9905 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9906 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9907 /* Note swapping the labels gives us not-equal. */
9908 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9909 else if (GET_MODE (temp) != VOIDmode)
9910 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9911 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9912 GET_MODE (temp), NULL_RTX, 0,
9913 if_false_label, if_true_label);
9918 if (drop_through_label)
9920 /* If do_jump produces code that might be jumped around,
9921 do any stack adjusts from that code, before the place
9922 where control merges in. */
9923 do_pending_stack_adjust ();
9924 emit_label (drop_through_label);
9928 /* Given a comparison expression EXP for values too wide to be compared
9929 with one insn, test the comparison and jump to the appropriate label.
9930 The code of EXP is ignored; we always test GT if SWAP is 0,
9931 and LT if SWAP is 1. */
9934 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9937 rtx if_false_label, if_true_label;
     /* SWAP selects which operand goes first, which is how GT turns
        into LT without changing the rtx code used below.  */
9939 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9940 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9941 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9942 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9944 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9947 /* Compare OP0 with OP1, word at a time, in mode MODE.
9948 UNSIGNEDP says to do unsigned comparison.
9949 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9952 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9953 enum machine_mode mode;
9956 rtx if_false_label, if_true_label;
9958 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9959 rtx drop_through_label = 0;
     /* A missing label means "fall through"; synthesize one so every
        branch below has a target.  */
9962 if (! if_true_label || ! if_false_label)
9963 drop_through_label = gen_label_rtx ();
9964 if (! if_true_label)
9965 if_true_label = drop_through_label;
9966 if (! if_false_label)
9967 if_false_label = drop_through_label;
9969 /* Compare a word at a time, high order first. */
9970 for (i = 0; i < nwords; i++)
9972 rtx op0_word, op1_word;
9974 if (WORDS_BIG_ENDIAN)
9976 op0_word = operand_subword_force (op0, i, mode);
9977 op1_word = operand_subword_force (op1, i, mode);
9981 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9982 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9985 /* All but high-order word must be compared as unsigned. */
9986 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9987 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9988 NULL_RTX, if_true_label);
9990 /* Consider lower words only if these are equal. */
9991 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9992 NULL_RTX, 0, NULL_RTX, if_false_label);
     /* All words equal: OP0 is not greater than OP1.  */
9996 emit_jump (if_false_label);
9997 if (drop_through_label)
9998 emit_label (drop_through_label);
10001 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label. */
10005 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10007 rtx if_false_label, if_true_label;
10009 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10010 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10011 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10012 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10014 rtx drop_through_label = 0;
      /* Any unequal word proves inequality, so the false label must exist
         for the per-word branches to target.  */
10016 if (! if_false_label)
10017 drop_through_label = if_false_label = gen_label_rtx ();
10019 for (i = 0; i < nwords; i++)
10020 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10021 operand_subword_force (op1, i, mode),
10022 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10023 word_mode, NULL_RTX, 0, if_false_label,
      /* Every word matched: the operands are equal.  */
10027 emit_jump (if_true_label);
10028 if (drop_through_label)
10029 emit_label (drop_through_label);
10032 /* Jump according to whether OP0 is 0.
10033 We assume that OP0 has an integer mode that is too wide
10034 for the available compare insns. */
10037 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10039 rtx if_false_label, if_true_label;
10041 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10044 rtx drop_through_label = 0;
10046 /* The fastest way of doing this comparison on almost any machine is to
10047 "or" all the words and compare the result. If all have to be loaded
10048 from memory and this is a very wide item, it's possible this may
10049 be slower, but that's highly unlikely. */
10051 part = gen_reg_rtx (word_mode);
10052 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10053 for (i = 1; i < nwords && part != 0; i++)
10054 part = expand_binop (word_mode, ior_optab, part,
10055 operand_subword_force (op0, i, GET_MODE (op0)),
10056 part, 1, OPTAB_WIDEN)
      /* PART nonzero here means the IOR chain succeeded; one compare
         against zero finishes the job.  */
10060 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10061 NULL_RTX, 0, if_false_label, if_true_label);
10066 /* If we couldn't do the "or" simply, do this with a series of compares. */
10067 if (! if_false_label)
10068 drop_through_label = if_false_label = gen_label_rtx ();
10070 for (i = 0; i < nwords; i++)
10071 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10072 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10073 if_false_label, NULL_RTX);
      /* All words were zero.  */
10076 emit_jump (if_true_label);
10078 if (drop_through_label)
10079 emit_label (drop_through_label);
10082 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10083 (including code to compute the values to be compared)
10084 and set (CC0) according to the result.
10085 The decision as to signed or unsigned comparison must be made by the caller.
10087 We force a stack adjustment unless there are currently
10088 things pushed on the stack that aren't yet used.
10090 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10093 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10094 size of MODE should be used. */
10097 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10098 register rtx op0, op1;
10099 enum rtx_code code;
10101 enum machine_mode mode;
10103 unsigned int align;
10107 /* If one operand is constant, make it the second one. Only do this
10108 if the other operand is not constant as well. */
10110 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10111 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
      /* Swapping the operands requires swapping the comparison code too.  */
10116 code = swap_condition (code);
10119 if (flag_force_mem)
10121 op0 = force_not_mem (op0);
10122 op1 = force_not_mem (op1);
10125 do_pending_stack_adjust ();
      /* Both operands constant: fold the comparison at compile time.  */
10127 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10128 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10132 /* There's no need to do this now that combine.c can eliminate lots of
10133 sign extensions. This can be less efficient in certain cases on other
10136 /* If this is a signed equality comparison, we can do it as an
10137 unsigned comparison since zero-extension is cheaper than sign
10138 extension and comparisons with zero are done as unsigned. This is
10139 the case even on machines that can do fast sign extension, since
10140 zero-extension is easier to combine with other operations than
10141 sign-extension is. If we are comparing against a constant, we must
10142 convert it to what it would look like unsigned. */
10143 if ((code == EQ || code == NE) && ! unsignedp
10144 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10146 if (GET_CODE (op1) == CONST_INT
10147 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10148 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10153 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
      /* Return a (CODE cc0 0) rtx describing the condition just set.  */
10155 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10158 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10159 The decision as to signed or unsigned comparison must be made by the caller.
10161 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10164 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10165 size of MODE should be used. */
10168 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10169 if_false_label, if_true_label)
10170 register rtx op0, op1;
10171 enum rtx_code code;
10173 enum machine_mode mode;
10175 unsigned int align;
10176 rtx if_false_label, if_true_label;
10179 int dummy_true_label = 0;
10181 /* Reverse the comparison if that is safe and we want to jump if it is
      false.  Reversal is unsafe for FP modes because of NaNs.  */
10183 if (! if_true_label && ! FLOAT_MODE_P (mode))
10185 if_true_label = if_false_label;
10186 if_false_label = 0;
10187 code = reverse_condition (code);
10190 /* If one operand is constant, make it the second one. Only do this
10191 if the other operand is not constant as well. */
10193 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10194 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10199 code = swap_condition (code);
10202 if (flag_force_mem)
10204 op0 = force_not_mem (op0);
10205 op1 = force_not_mem (op1);
10208 do_pending_stack_adjust ();
      /* Both operands constant: the branch direction is known; emit an
         unconditional jump (or nothing) instead of a compare.  */
10210 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10211 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10213 if (tem == const_true_rtx)
10216 emit_jump (if_true_label);
10220 if (if_false_label)
10221 emit_jump (if_false_label);
10227 /* There's no need to do this now that combine.c can eliminate lots of
10228 sign extensions. This can be less efficient in certain cases on other
10231 /* If this is a signed equality comparison, we can do it as an
10232 unsigned comparison since zero-extension is cheaper than sign
10233 extension and comparisons with zero are done as unsigned. This is
10234 the case even on machines that can do fast sign extension, since
10235 zero-extension is easier to combine with other operations than
10236 sign-extension is. If we are comparing against a constant, we must
10237 convert it to what it would look like unsigned. */
10238 if ((code == EQ || code == NE) && ! unsignedp
10239 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10241 if (GET_CODE (op1) == CONST_INT
10242 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10243 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      /* emit_cmp_and_jump_insns requires a true label; make a throwaway
         one if the caller wanted fall-through on true.  */
10248 if (! if_true_label)
10250 dummy_true_label = 1;
10251 if_true_label = gen_label_rtx ();
10254 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10257 if (if_false_label)
10258 emit_jump (if_false_label);
10259 if (dummy_true_label)
10260 emit_label (if_true_label);
10263 /* Generate code for a comparison expression EXP (including code to compute
10264 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10265 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10266 generated code will drop through.
10267 SIGNED_CODE should be the rtx operation for this comparison for
10268 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10270 We force a stack adjustment unless there are currently
10271 things pushed on the stack that aren't yet used. */
/* NOTE(review): this view is missing the return type, the `tree exp;'
   parameter declaration, the `int unsignedp;' local, the braces, and the
   early-`return' bodies after the ERROR_MARK checks -- incomplete text.  */
10274 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10277 enum rtx_code signed_code, unsigned_code;
10278 rtx if_false_label, if_true_label;
10280 unsigned int align0, align1;
10281 register rtx op0, op1;
10282 register tree type;
10283 register enum machine_mode mode;
10285 enum rtx_code code;
10287 /* Don't crash if the comparison was erroneous. */
/* Expand both operands, capturing their alignments for the BLKmode case
   passed down to do_compare_rtx_and_jump below.  */
10288 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10289 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10292 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10293 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10296 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10297 mode = TYPE_MODE (type);
/* Detect the case where operand 0 was a constant promoted to a wider
   mode than operand 1's type; then operand 1's type is the real one.  */
10298 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10299 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10300 || (GET_MODE_BITSIZE (mode)
10301 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10304 /* op0 might have been replaced by promoted constant, in which
10305 case the type of second argument should be used. */
10306 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10307 mode = TYPE_MODE (type);
/* Pick the signed or unsigned rtx comparison code from the type.  */
10309 unsignedp = TREE_UNSIGNED (type);
10310 code = unsignedp ? unsigned_code : signed_code;
10312 #ifdef HAVE_canonicalize_funcptr_for_compare
10313 /* If function pointers need to be "canonicalized" before they can
10314 be reliably compared, then canonicalize them. */
10315 if (HAVE_canonicalize_funcptr_for_compare
10316 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10317 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10320 rtx new_op0 = gen_reg_rtx (mode);
10322 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
10326 if (HAVE_canonicalize_funcptr_for_compare
10327 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10328 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10331 rtx new_op1 = gen_reg_rtx (mode);
10333 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10338 /* Do any postincrements in the expression that was tested. */
/* Hand off to the rtx-level routine; pass the object size only for
   BLKmode comparisons, and the smaller of the two alignments.  */
10341 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10343 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10344 MIN (align0, align1),
10345 if_false_label, if_true_label);
10348 /* Generate code to calculate EXP using a store-flag instruction
10349 and return an rtx for the result. EXP is either a comparison
10350 or a TRUTH_NOT_EXPR whose operand is a comparison.
10352 If TARGET is nonzero, store the result there if convenient.
10354 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10357 Return zero if there is no suitable set-flag instruction
10358 available on this machine.
10360 Once expand_expr has been called on the arguments of the comparison,
10361 we are committed to doing the store flag, since it is not safe to
10362 re-evaluate the expression. We emit the store-flag insn by calling
10363 emit_store_flag, but only expand the arguments if we have a reason
10364 to believe that emit_store_flag will be successful. If we think that
10365 it will, but it isn't, we have to simulate the store-flag with a
10366 set/jump/set sequence. */
/* NOTE(review): this view is missing the return type, the `tree exp;' and
   `int only_cheap;' declarations, several locals (op0, op1, invert, tem,
   result, label, unsignedp, ops_unsignedp), most braces, some `switch'
   cases (EQ/NE and the UNORDERED group), and several `return's.  */
10369 do_store_flag (exp, target, mode, only_cheap)
10372 enum machine_mode mode;
10375 enum rtx_code code;
10376 tree arg0, arg1, type;
10378 enum machine_mode operand_mode;
10382 enum insn_code icode;
10383 rtx subtarget = target;
10386 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10387 result at the end. We can't simply invert the test since it would
10388 have already been inverted if it were valid. This case occurs for
10389 some floating-point comparisons. */
10391 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10392 invert = 1, exp = TREE_OPERAND (exp, 0);
10394 arg0 = TREE_OPERAND (exp, 0);
10395 arg1 = TREE_OPERAND (exp, 1);
10397 /* Don't crash if the comparison was erroneous. */
10398 if (arg0 == error_mark_node || arg1 == error_mark_node)
10401 type = TREE_TYPE (arg0);
10402 operand_mode = TYPE_MODE (type);
10403 unsignedp = TREE_UNSIGNED (type);
10405 /* We won't bother with BLKmode store-flag operations because it would mean
10406 passing a lot of information to emit_store_flag. */
10407 if (operand_mode == BLKmode)
10410 /* We won't bother with store-flag operations involving function pointers
10411 when function pointers must be canonicalized before comparisons. */
10412 #ifdef HAVE_canonicalize_funcptr_for_compare
10413 if (HAVE_canonicalize_funcptr_for_compare
10414 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10415 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10417 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10418 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10419 == FUNCTION_TYPE))))
10426 /* Get the rtx comparison code to use. We know that EXP is a comparison
10427 operation of some type. Some comparisons against 1 and -1 can be
10428 converted to comparisons with zero. Do so here so that the tests
10429 below will be aware that we have a comparison with zero. These
10430 tests will not catch constants in the first operand, but constants
10431 are rarely passed as the first operand. */
/* Map the tree comparison code (LT_EXPR, GE_EXPR, ...) to the rtx code,
   folding `< 1' to `<= 0', `<= -1' to `< 0', etc.  NOTE(review): the
   case labels themselves are missing from this extracted view.  */
10433 switch (TREE_CODE (exp))
10442 if (integer_onep (arg1))
10443 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10445 code = unsignedp ? LTU : LT;
10448 if (! unsignedp && integer_all_onesp (arg1))
10449 arg1 = integer_zero_node, code = LT;
10451 code = unsignedp ? LEU : LE;
10454 if (! unsignedp && integer_all_onesp (arg1))
10455 arg1 = integer_zero_node, code = GE;
10457 code = unsignedp ? GTU : GT;
10460 if (integer_onep (arg1))
10461 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10463 code = unsignedp ? GEU : GE;
10466 case UNORDERED_EXPR:
10492 /* Put a constant second. */
10493 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10495 tem = arg0; arg0 = arg1; arg1 = tem;
10496 code = swap_condition (code);
10499 /* If this is an equality or inequality test of a single bit, we can
10500 do this by shifting the bit being tested to the low-order bit and
10501 masking the result with the constant 1. If the condition was EQ,
10502 we xor it with 1. This does not require an scc insn and is faster
10503 than an scc insn even if we have it. */
10505 if ((code == NE || code == EQ)
10506 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10507 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10509 tree inner = TREE_OPERAND (arg0, 0);
10510 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10513 /* If INNER is a right shift of a constant and it plus BITNUM does
10514 not overflow, adjust BITNUM and INNER. */
10516 if (TREE_CODE (inner) == RSHIFT_EXPR
10517 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10518 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10519 && bitnum < TYPE_PRECISION (type)
10520 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10521 bitnum - TYPE_PRECISION (type)))
10523 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10524 inner = TREE_OPERAND (inner, 0);
10527 /* If we are going to be able to omit the AND below, we must do our
10528 operations as unsigned. If we must use the AND, we have a choice.
10529 Normally unsigned is faster, but for some machines signed is. */
10530 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10531 #ifdef LOAD_EXTEND_OP
10532 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Pick (or allocate) a register in the right mode to expand INNER into.  */
10538 if (! get_subtarget (subtarget)
10539 || GET_MODE (subtarget) != operand_mode
10540 || ! safe_from_p (subtarget, inner, 1))
10543 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
10546 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10547 size_int (bitnum), subtarget, ops_unsignedp);
10549 if (GET_MODE (op0) != mode)
10550 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* For EQ (or inverted NE) the result must be flipped: XOR with 1.  */
10552 if ((code == EQ && ! invert) || (code == NE && invert))
10553 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10554 ops_unsignedp, OPTAB_LIB_WIDEN);
10556 /* Put the AND last so it can combine with more things. */
10557 if (bitnum != TYPE_PRECISION (type) - 1)
10558 op0 = expand_and (op0, const1_rtx, subtarget);
10563 /* Now see if we are likely to be able to do this. Return if not. */
10564 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10567 icode = setcc_gen_code[(int) code];
10568 if (icode == CODE_FOR_nothing
10569 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10571 /* We can only do this if it is one of the special cases that
10572 can be handled without an scc insn. */
10573 if ((code == LT && integer_zerop (arg1))
10574 || (! only_cheap && code == GE && integer_zerop (arg1)))
/* NE/EQ against zero can sometimes be done via abs or ffs tricks when
   branches are cheap enough; otherwise give up (return missing here).  */
10576 else if (BRANCH_COST >= 0
10577 && ! only_cheap && (code == NE || code == EQ)
10578 && TREE_CODE (type) != REAL_TYPE
10579 && ((abs_optab->handlers[(int) operand_mode].insn_code
10580 != CODE_FOR_nothing)
10581 || (ffs_optab->handlers[(int) operand_mode].insn_code
10582 != CODE_FOR_nothing)))
/* Committed now: expand the operands (cannot re-evaluate EXP after this).  */
10588 if (! get_subtarget (target)
10589 || GET_MODE (subtarget) != operand_mode
10590 || ! safe_from_p (subtarget, arg1, 1))
10593 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10594 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10597 target = gen_reg_rtx (mode);
10599 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10600 because, if the emit_store_flag does anything it will succeed and
10601 OP0 and OP1 will not be used subsequently. */
10603 result = emit_store_flag (target, code,
10604 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10605 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10606 operand_mode, unsignedp, 1);
/* A TRUTH_NOT_EXPR wrapper means the 0/1 result must be inverted.  */
10611 result = expand_binop (mode, xor_optab, result, const1_rtx,
10612 result, 0, OPTAB_LIB_WIDEN);
10616 /* If this failed, we have to do this with set/compare/jump/set code. */
/* TARGET must be a register not mentioned in the operands, since we
   store into it before the comparison is made.  */
10617 if (GET_CODE (target) != REG
10618 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10619 target = gen_reg_rtx (GET_MODE (target));
/* Simulate the scc: preset the "true" value, branch around the "false"
   store when the condition holds.  */
10621 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10622 result = compare_from_rtx (op0, op1, code, unsignedp,
10623 operand_mode, NULL_RTX, 0);
/* compare_from_rtx may fold to a constant; answer directly then.  */
10624 if (GET_CODE (result) == CONST_INT)
10625 return (((result == const0_rtx && ! invert)
10626 || (result != const0_rtx && invert))
10627 ? const0_rtx : const1_rtx);
10629 label = gen_label_rtx ();
10630 if (bcc_gen_fctn[(int) code] == 0)
10633 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10634 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10635 emit_label (label);
10640 /* Generate a tablejump instruction (used for switch statements). */
10642 #ifdef HAVE_tablejump
10644 /* INDEX is the value being switched on, with the lowest value
10645 in the table already subtracted.
10646 MODE is its expected mode (needed if INDEX is constant).
10647 RANGE is the length of the jump table.
10648 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10650 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10651 index value is out of range. */
/* NOTE(review): this view is missing the return type, the opening brace,
   the label arguments of the first emit_cmp_and_jump_insns call, the
   `#else'/`#endif' of the PIC_CASE_VECTOR_ADDRESS conditionals, and the
   emit_barrier () body of the final `if' -- incomplete text.  */
10654 do_tablejump (index, mode, range, table_label, default_label)
10655 rtx index, range, table_label, default_label;
10656 enum machine_mode mode;
10658 register rtx temp, vector;
10660 /* Do an unsigned comparison (in the proper mode) between the index
10661 expression and the value which represents the length of the range.
10662 Since we just finished subtracting the lower bound of the range
10663 from the index expression, this comparison allows us to simultaneously
10664 check that the original index expression value is both greater than
10665 or equal to the minimum value of the range and less than or equal to
10666 the maximum value of the range. */
10668 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10671 /* If index is in range, it must fit in Pmode.
10672 Convert to Pmode so we can index with it. */
10674 index = convert_to_mode (Pmode, index, 1);
10676 /* Don't let a MEM slip thru, because then INDEX that comes
10677 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10678 and break_out_memory_refs will go to work on it and mess it up. */
10679 #ifdef PIC_CASE_VECTOR_ADDRESS
10680 if (flag_pic && GET_CODE (index) != REG)
10681 index = copy_to_mode_reg (Pmode, index);
10684 /* If flag_force_addr were to affect this address
10685 it could interfere with the tricky assumptions made
10686 about addresses that contain label-refs,
10687 which may be valid only very near the tablejump itself. */
10688 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10689 GET_MODE_SIZE, because this indicates how large insns are. The other
10690 uses should all be Pmode, because they are addresses. This code
10691 could fail if addresses and insns are not the same size. */
/* Compute the address of the table entry:
   table_label + index * sizeof (one table entry).  */
10692 index = gen_rtx_PLUS (Pmode,
10693 gen_rtx_MULT (Pmode, index,
10694 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10695 gen_rtx_LABEL_REF (Pmode, table_label));
10696 #ifdef PIC_CASE_VECTOR_ADDRESS
10698 index = PIC_CASE_VECTOR_ADDRESS (index);
/* Deliberately not memory_address: see the flag_force_addr note above.  */
10701 index = memory_address_noforce (CASE_VECTOR_MODE, index);
/* Load the table entry and widen/convert it into an address register;
   the table is constant, hence RTX_UNCHANGING_P.  */
10702 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10703 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10704 RTX_UNCHANGING_P (vector) = 1;
10705 convert_move (temp, vector, 0);
10707 emit_jump_insn (gen_tablejump (temp, table_label));
10709 /* If we are generating PIC code or if the table is PC-relative, the
10710 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10711 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10715 #endif /* HAVE_tablejump */