/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
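
/* For example, with the default MOVE_RATIO of 15 (when not optimizing
   for size), a constant-length copy that move_by_pieces_ninsns prices
   at 14 or fewer single-mode moves is expanded inline, piece by piece;
   anything costlier falls through to a movstr pattern or a library
   call.  */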
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
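
  /* A scratch (set (nil) (nil)) insn: the loop below plugs each
     candidate hard register and memory reference into this pattern and
     asks recog whether the resulting load or store insn would be
     recognized.  */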
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
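
/* For illustration, a sketch of the usual calling pattern (compare
   emit_block_move below), protecting the destination with MODIFY
   nonzero and the source with MODIFY zero, then using both at once:

	x = protect_from_queue (x, 1);
	y = protect_from_queue (y, 0);
	emit_move_insn (x, y);

   Holding either protected value across an emit_queue would be
   incorrect.  */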
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
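
/* For illustration (a sketch): if TO is a DImode register and FROM an
   SImode register, then

	convert_move (to, from, 1);

   emits a zero-extension from SImode to DImode; with UNSIGNEDP == 0 it
   would emit a sign-extension instead.  */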
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = gen_rtx_SUBREG (to_mode, from, 0);
      else
        to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      /* Now try a library call in this mode.  */
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
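
/* For illustration (a sketch): unlike convert_move, this returns the
   converted value rather than storing it, so

	rtx wide = convert_to_mode (DImode, narrow, 0);

   yields a sign-extended DImode copy of NARROW (or NARROW itself when
   no conversion is needed).  */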
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
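
  /* Concretely: with a 32-bit HOST_WIDE_INT, converting (const_int -1)
     as an unsigned value to a 64-bit mode must produce
     0x00000000ffffffff (low word all ones, high word zero); the
     immed_double_const call below builds exactly that, where
     gen_lowpart would have produced all ones.  */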
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (trunc_int_for_mode (val, mode));
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.

   When TO is NULL, emit_single_push_insn is used to push the
   FROM data onto the stack.

   ALIGN is maximum alignment we can assume.  */
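
/* (One caller is emit_block_move below, which dispatches here when
   SIZE is a CONST_INT that MOVE_BY_PIECES_P approves.)  */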
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            {
              to1 = replace_equiv_address (data->to, data->to_addr);
              to1 = adjust_address (to1, mode, 0);
            }
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        {
          from1 = replace_equiv_address (data->from, data->from_addr);
          from1 = adjust_address (from1, mode, 0);
        }
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different
         conventions for returning pointers, we could end up generating
         incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));
          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if (bytepos == 0
              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
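
  /* For example, a 5-byte structure on a 32-bit big-endian target:
     bytes % UNITS_PER_WORD == 1, so the correction is 32 - 8 = 24 bits,
     the width of the unused high-order bytes that must be skipped.  */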
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          bitsize, BITS_PER_WORD),
                       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
2275 can_store_by_pieces (len, constfun, constfundata, align)
2276 unsigned HOST_WIDE_INT len;
2277 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2281 unsigned HOST_WIDE_INT max_size, l;
2282 HOST_WIDE_INT offset = 0;
2283 enum machine_mode mode, tmode;
2284 enum insn_code icode;
2288 if (! MOVE_BY_PIECES_P (len, align))
2291 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2292 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2293 align = MOVE_MAX * BITS_PER_UNIT;
2295 /* We would first store what we can in the largest integer mode, then go to
2296 successively smaller modes. */
2299 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2304 max_size = MOVE_MAX_PIECES + 1;
2305 while (max_size > 1)
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2312 if (mode == VOIDmode)
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing
2317 && align >= GET_MODE_ALIGNMENT (mode))
2319 unsigned int size = GET_MODE_SIZE (mode);
2326 cst = (*constfun) (constfundata, offset, mode);
2327 if (!LEGITIMATE_CONSTANT_P (cst))
2337 max_size = GET_MODE_SIZE (mode);
2340 /* The code above should have handled everything. */
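/* Hypothetical caller sketch (my_constfun and the literal sizes are
   illustrative only, not defined here): a memset (p, c, 13) expander
   might ask

     if (can_store_by_pieces (13, my_constfun, &c, align))
       store_by_pieces (dest_mem, 13, my_constfun, &c, align);

   where my_constfun (data, offset, mode) returns an rtx holding the
   byte *(char *) data replicated to the width of MODE.  */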
2348 /* Generate several move instructions to store LEN bytes generated by
2349 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2350 pointer which will be passed as argument in every CONSTFUN call.
2351 ALIGN is maximum alignment we can assume. */
2354 store_by_pieces (to, len, constfun, constfundata, align)
2356 unsigned HOST_WIDE_INT len;
2357 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2361 struct store_by_pieces data;
2363 if (! MOVE_BY_PIECES_P (len, align))
2365 to = protect_from_queue (to, 1);
2366 data.constfun = constfun;
2367 data.constfundata = constfundata;
2370 store_by_pieces_1 (&data, align);
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2378 clear_by_pieces (to, len, align)
2380 unsigned HOST_WIDE_INT len;
2383 struct store_by_pieces data;
2385 data.constfun = clear_by_pieces_1;
2386 data.constfundata = NULL;
2389 store_by_pieces_1 (&data, align);
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2396 clear_by_pieces_1 (data, offset, mode)
2397 PTR data ATTRIBUTE_UNUSED;
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2399 enum machine_mode mode ATTRIBUTE_UNUSED;
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2410 store_by_pieces_1 (data, align)
2411 struct store_by_pieces *data;
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2420 data->to_addr = to_addr;
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2425 data->explicit_inc_to = 0;
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2429 data->offset = data->len;
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2458 if (!data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2469 while (max_size > 1)
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2476 if (mode == VOIDmode)
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
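/* Worked example (assuming MOVE_MAX_PIECES == 8, full alignment, and
   movqi/movhi/movsi/movdi handlers all available): for len == 11 the
   loop above selects DImode first and store_by_pieces_2 emits one
   8-byte store, then HImode for a 2-byte store, then QImode for the
   final byte: 11 == 8 + 2 + 1.  */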
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2496 store_by_pieces_2 (genfun, mode, data)
2497 rtx (*genfun) PARAMS ((rtx, ...));
2498 enum machine_mode mode;
2499 struct store_by_pieces *data;
2501 unsigned int size = GET_MODE_SIZE (mode);
2504 while (data->len >= size)
2507 data->offset -= size;
2509 if (data->autinc_to)
2511 to1 = replace_equiv_address (data->to, data->to_addr);
2512 to1 = adjust_address (to1, mode, 0);
2515 to1 = adjust_address (data->to, mode, data->offset);
2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2527 if (! data->reverse)
2528 data->offset += size;
2534 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2535 its length in bytes and ALIGN is the maximum alignment we can assume.
2537 If we call a function that returns the length of the block, return it. */
2540 clear_storage (object, size, align)
2545 #ifdef TARGET_MEM_FUNCTIONS
2547 tree call_expr, arg_list;
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2562 if (GET_CODE (size) == CONST_INT
2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2572 enum machine_mode mode;
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2577 enum insn_code code = clrstr_optab[(int) mode];
2578 insn_operand_predicate_fn pred;
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2587 <= (GET_MODE_MASK (mode) >> 1)))
2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
2595 rtx last = get_last_insn ();
2598 op1 = convert_to_mode (mode, size, 1);
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
2601 op1 = copy_to_mode_reg (mode, op1);
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2610 delete_insns_since (last);
2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
2651 For targets where libcalls and normal calls have different
2652 conventions for returning pointers, we could end up generating
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
2661 /* This was copied from except.c, I don't know if all this is
2662 necessary in this context or not. */
2663 fn = get_identifier ("memset");
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
2667 ggc_add_tree_root (&fn, 1);
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
2671 TREE_NOTHROW (fn) = 1;
2672 make_decl_rtl (fn, NULL);
2673 assemble_external (fn);
2676 /* We need to make an argument list for the function call.
3678 memset has three arguments: the first is a void * address, the
3679 second an integer with the initialization value, the last is a
2680 size_t byte count for the copy. */
2682 = build_tree_list (NULL_TREE,
2683 make_tree (build_pointer_type (void_type_node),
2685 TREE_CHAIN (arg_list)
2686 = build_tree_list (NULL_TREE,
2687 make_tree (integer_type_node, const0_rtx));
2688 TREE_CHAIN (TREE_CHAIN (arg_list))
2689 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2690 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2692 /* Now we have to build up the CALL_EXPR itself. */
2693 call_expr = build1 (ADDR_EXPR,
2694 build_pointer_type (TREE_TYPE (fn)), fn);
2695 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2696 call_expr, arg_list, NULL_TREE);
2697 TREE_SIDE_EFFECTS (call_expr) = 1;
2699 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2701 emit_library_call (bzero_libfunc, LCT_NORMAL,
2702 VOIDmode, 2, object, Pmode, size,
2703 TYPE_MODE (integer_type_node));
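  /* Hypothetical use: clearing a 16-byte BLKmode temporary TEMP that
     is known to be 32-bit aligned would be

       clear_storage (temp, GEN_INT (16), 32);

     which prefers clear_by_pieces when MOVE_BY_PIECES_P approves,
     falls back to the clrstrM patterns, and only then emits the
     memset or bzero call built above.  */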
2711 /* Generate code to copy Y into X.
2712 Both Y and X must have the same mode, except that
2713 Y can be a constant with VOIDmode.
2714 This mode cannot be BLKmode; use emit_block_move for that.
2716 Return the last instruction emitted. */
2719 emit_move_insn (x, y)
2722 enum machine_mode mode = GET_MODE (x);
2723 rtx y_cst = NULL_RTX;
2726 x = protect_from_queue (x, 1);
2727 y = protect_from_queue (y, 0);
2729 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2732 /* Never force constant_p_rtx to memory. */
2733 if (GET_CODE (y) == CONSTANT_P_RTX)
2735 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2738 y = force_const_mem (mode, y);
2741 /* If X or Y are memory references, verify that their addresses are valid
2743 if (GET_CODE (x) == MEM
2744 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2745 && ! push_operand (x, GET_MODE (x)))
2747 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2748 x = validize_mem (x);
2750 if (GET_CODE (y) == MEM
2751 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2753 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2754 y = validize_mem (y);
2756 if (mode == BLKmode)
2759 last_insn = emit_move_insn_1 (x, y);
2761 if (y_cst && GET_CODE (x) == REG)
2762 REG_NOTES (last_insn)
2763 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2768 /* Low level part of emit_move_insn.
2769 Called just like emit_move_insn, but assumes X and Y
2770 are basically valid. */
2773 emit_move_insn_1 (x, y)
2776 enum machine_mode mode = GET_MODE (x);
2777 enum machine_mode submode;
2778 enum mode_class class = GET_MODE_CLASS (mode);
2781 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2784 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2788 /* Expand complex moves by moving real part and imag part, if possible. */
2789 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2790 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2792 (class == MODE_COMPLEX_INT
2793 ? MODE_INT : MODE_FLOAT),
2795 && (mov_optab->handlers[(int) submode].insn_code
2796 != CODE_FOR_nothing))
2798 /* Don't split destination if it is a stack push. */
2799 int stack = push_operand (x, GET_MODE (x));
2801 #ifdef PUSH_ROUNDING
2802 /* In case we output to the stack, but the size is smaller than the machine can
2803 push exactly, we need to use move instructions. */
2805 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2808 int offset1, offset2;
2810 /* Do not use anti_adjust_stack, since we don't want to update
2811 stack_pointer_delta. */
2812 temp = expand_binop (Pmode,
2813 #ifdef STACK_GROWS_DOWNWARD
2820 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2824 if (temp != stack_pointer_rtx)
2825 emit_move_insn (stack_pointer_rtx, temp);
2826 #ifdef STACK_GROWS_DOWNWARD
2828 offset2 = GET_MODE_SIZE (submode);
2830 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2831 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2832 + GET_MODE_SIZE (submode));
2834 emit_move_insn (change_address (x, submode,
2835 gen_rtx_PLUS (Pmode,
2837 GEN_INT (offset1))),
2838 gen_realpart (submode, y));
2839 emit_move_insn (change_address (x, submode,
2840 gen_rtx_PLUS (Pmode,
2842 GEN_INT (offset2))),
2843 gen_imagpart (submode, y));
2847 /* If this is a stack, push the highpart first, so it
2848 will be in the argument order.
2850 In that case, change_address is used only to convert
2851 the mode, not to change the address. */
2854 /* Note that the real part always precedes the imag part in memory
2855 regardless of machine's endianness. */
2856 #ifdef STACK_GROWS_DOWNWARD
2857 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2858 (gen_rtx_MEM (submode, XEXP (x, 0)),
2859 gen_imagpart (submode, y)));
2860 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2861 (gen_rtx_MEM (submode, XEXP (x, 0)),
2862 gen_realpart (submode, y)));
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_realpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2874 rtx realpart_x, realpart_y;
2875 rtx imagpart_x, imagpart_y;
2877 /* If this is a complex value with each part being smaller than a
2878 word, the usual calling sequence will likely pack the pieces into
2879 a single register. Unfortunately, SUBREG of hard registers only
2880 deals in terms of words, so we have a problem converting input
2881 arguments to the CONCAT of two registers that is used elsewhere
2882 for complex values. If this is before reload, we can copy it into
2883 memory and reload. FIXME, we should see about using extract and
2884 insert on integer registers, but complex short and complex char
2885 variables should be rarely used. */
2886 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2887 && (reload_in_progress | reload_completed) == 0)
2889 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2890 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2892 if (packed_dest_p || packed_src_p)
2894 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2895 ? MODE_FLOAT : MODE_INT);
2897 enum machine_mode reg_mode
2898 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2900 if (reg_mode != BLKmode)
2902 rtx mem = assign_stack_temp (reg_mode,
2903 GET_MODE_SIZE (mode), 0);
2904 rtx cmem = adjust_address (mem, mode, 0);
2907 = N_("function using short complex types cannot be inline");
2911 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2912 emit_move_insn_1 (cmem, y);
2913 return emit_move_insn_1 (sreg, mem);
2917 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2918 emit_move_insn_1 (mem, sreg);
2919 return emit_move_insn_1 (x, cmem);
2925 realpart_x = gen_realpart (submode, x);
2926 realpart_y = gen_realpart (submode, y);
2927 imagpart_x = gen_imagpart (submode, x);
2928 imagpart_y = gen_imagpart (submode, y);
2930 /* Show the output dies here. This is necessary for SUBREGs
2931 of pseudos since we cannot track their lifetimes correctly;
2932 hard regs shouldn't appear here except as return values.
2933 We never want to emit such a clobber after reload. */
2935 && ! (reload_in_progress || reload_completed)
2936 && (GET_CODE (realpart_x) == SUBREG
2937 || GET_CODE (imagpart_x) == SUBREG))
2939 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (realpart_x, realpart_y));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (imagpart_x, imagpart_y));
2948 return get_last_insn ();
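  /* Illustrative decomposition (the modes are hypothetical): a DCmode
     move whose submode is DFmode becomes, in effect,

       emit_move_insn (gen_realpart (DFmode, x), gen_realpart (DFmode, y));
       emit_move_insn (gen_imagpart (DFmode, x), gen_imagpart (DFmode, y));

     modulo the stack-push and packed-register special cases handled
     above.  */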
2951 /* This will handle any multi-word mode that lacks a move_insn pattern.
2952 However, you will get better code if you define such patterns,
2953 even if they must turn into multiple assembler instructions. */
2954 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2960 #ifdef PUSH_ROUNDING
2962 /* If X is a push on the stack, do the push now and replace
2963 X with a reference to the stack pointer. */
2964 if (push_operand (x, GET_MODE (x)))
2969 /* Do not use anti_adjust_stack, since we don't want to update
2970 stack_pointer_delta. */
2971 temp = expand_binop (Pmode,
2972 #ifdef STACK_GROWS_DOWNWARD
2979 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2983 if (temp != stack_pointer_rtx)
2984 emit_move_insn (stack_pointer_rtx, temp);
2986 code = GET_CODE (XEXP (x, 0));
2987 /* Just hope that small offsets off SP are OK. */
2988 if (code == POST_INC)
2989 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2990 GEN_INT (-(HOST_WIDE_INT)
2991 GET_MODE_SIZE (GET_MODE (x))));
2992 else if (code == POST_DEC)
2993 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2994 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2996 temp = stack_pointer_rtx;
2998 x = change_address (x, VOIDmode, temp);
3002 /* If we are in reload, see if either operand is a MEM whose address
3003 is scheduled for replacement. */
3004 if (reload_in_progress && GET_CODE (x) == MEM
3005 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3006 x = replace_equiv_address_nv (x, inner);
3007 if (reload_in_progress && GET_CODE (y) == MEM
3008 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3009 y = replace_equiv_address_nv (y, inner);
3015 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3018 rtx xpart = operand_subword (x, i, 1, mode);
3019 rtx ypart = operand_subword (y, i, 1, mode);
3021 /* If we can't get a part of Y, put Y into memory if it is a
3022 constant. Otherwise, force it into a register. If we still
3023 can't get a part of Y, abort. */
3024 if (ypart == 0 && CONSTANT_P (y))
3026 y = force_const_mem (mode, y);
3027 ypart = operand_subword (y, i, 1, mode);
3029 else if (ypart == 0)
3030 ypart = operand_subword_force (y, i, mode);
3032 if (xpart == 0 || ypart == 0)
3035 need_clobber |= (GET_CODE (xpart) == SUBREG);
3037 last_insn = emit_move_insn (xpart, ypart);
3040 seq = gen_sequence ();
3043 /* Show the output dies here. This is necessary for SUBREGs
3044 of pseudos since we cannot track their lifetimes correctly;
3045 hard regs shouldn't appear here except as return values.
3046 We never want to emit such a clobber after reload. */
3048 && ! (reload_in_progress || reload_completed)
3049 && need_clobber != 0)
3051 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
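  /* In effect, a DImode move on a 32-bit target lacking a movdi
     pattern is split by the loop above into two word moves,

       emit_move_insn (operand_subword (x, 0, 1, DImode),
                       operand_subword (y, 0, 1, DImode));
       emit_move_insn (operand_subword (x, 1, 1, DImode),
                       operand_subword (y, 1, 1, DImode));

     preceded by a CLOBBER of X when some XPART is a SUBREG of a
     pseudo.  */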
3062 /* Pushing data onto the stack. */
3064 /* Push a block of length SIZE (perhaps variable)
3065 and return an rtx to address the beginning of the block.
3066 Note that it is not possible for the value returned to be a QUEUED.
3067 The value may be virtual_outgoing_args_rtx.
3069 EXTRA is the number of bytes of padding to push in addition to SIZE.
3070 BELOW nonzero means this padding comes at low addresses;
3071 otherwise, the padding comes at high addresses. */
3074 push_block (size, extra, below)
3080 size = convert_modes (Pmode, ptr_mode, size, 1);
3081 if (CONSTANT_P (size))
3082 anti_adjust_stack (plus_constant (size, extra));
3083 else if (GET_CODE (size) == REG && extra == 0)
3084 anti_adjust_stack (size);
3087 temp = copy_to_mode_reg (Pmode, size);
3089 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3090 temp, 0, OPTAB_LIB_WIDEN);
3091 anti_adjust_stack (temp);
3094 #ifndef STACK_GROWS_DOWNWARD
3095 #ifdef ARGS_GROW_DOWNWARD
3096 if (!ACCUMULATE_OUTGOING_ARGS)
3104 /* Return the lowest stack address when STACK or ARGS grow downward and
3105 we are not accumulating outgoing arguments (the c4x port uses such conventions). */
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3132 get_push_address (size)
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
3147 /* Emit single push insn. */
3149 emit_single_push_insn (mode, x, type)
3151 enum machine_mode mode;
3154 #ifdef PUSH_ROUNDING
3156 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 if (GET_MODE_SIZE (mode) == rounded_size)
3160 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3163 #ifdef STACK_GROWS_DOWNWARD
3164 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3165 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3167 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3168 GEN_INT (rounded_size));
3170 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3173 dest = gen_rtx_MEM (mode, dest_addr);
3175 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179 set_mem_attributes (dest, type, 1);
3180 /* Function incoming arguments may overlap with sibling call
3181 outgoing arguments and we cannot allow reordering of reads
3182 from function arguments with stores to outgoing arguments
3183 of sibling calls. */
3184 MEM_ALIAS_SET (dest) = 0;
3186 emit_move_insn (dest, x);
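  /* For example, on a STACK_GROWS_DOWNWARD target with
     STACK_PUSH_CODE == PRE_DEC and a word-sized X, the move above is
     the classic push

       (set (mem:SI (pre_dec:SI (reg:SI sp))) x)

     while a mode whose size PUSH_ROUNDING pads goes through the
     PRE_MODIFY address built above instead.  */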
3192 /* Generate code to push X onto the stack, assuming it has mode MODE and
3194 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3196 SIZE is an rtx for the size of data to be copied (in bytes),
3197 needed only if X is BLKmode.
3199 ALIGN (in bits) is maximum alignment we can assume.
3201 If PARTIAL and REG are both nonzero, then copy that many of the first
3202 words of X into registers starting with REG, and push the rest of X.
3203 The amount of space pushed is decreased by PARTIAL words,
3204 rounded *down* to a multiple of PARM_BOUNDARY.
3205 REG must be a hard register in this case.
3206 If REG is zero but PARTIAL is not, take all other actions for an
3207 argument partially in registers, but do not actually load any registers.
3210 EXTRA is the amount in bytes of extra space to leave next to this arg.
3211 This is ignored if an argument block has already been allocated.
3213 On a machine that lacks real push insns, ARGS_ADDR is the address of
3214 the bottom of the argument block for this call. We use indexing off there
3215 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3216 argument block has not been preallocated.
3218 ARGS_SO_FAR is the size of args previously pushed for this call.
3220 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3221 for arguments passed in registers. If nonzero, it will be the number
3222 of bytes required. */
3225 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3226 args_addr, args_so_far, reg_parm_stack_space,
3229 enum machine_mode mode;
3238 int reg_parm_stack_space;
3242 enum direction stack_direction
3243 #ifdef STACK_GROWS_DOWNWARD
3249 /* Decide where to pad the argument: `downward' for below,
3250 `upward' for above, or `none' for don't pad it.
3251 Default is below for small data on big-endian machines; else above. */
3252 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3254 /* Invert direction if stack is post-update. */
3255 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3256 if (where_pad != none)
3257 where_pad = (where_pad == downward ? upward : downward);
3259 xinner = x = protect_from_queue (x, 0);
3261 if (mode == BLKmode)
3263 /* Copy a block into the stack, entirely or partially. */
3266 int used = partial * UNITS_PER_WORD;
3267 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3275 /* USED is now the # of bytes we need not copy to the stack
3276 because registers will take care of them. */
3279 xinner = adjust_address (xinner, BLKmode, used);
3281 /* If the partial register-part of the arg counts in its stack size,
3282 skip the part of stack space corresponding to the registers.
3283 Otherwise, start copying to the beginning of the stack space,
3284 by setting SKIP to 0. */
3285 skip = (reg_parm_stack_space == 0) ? 0 : used;
3287 #ifdef PUSH_ROUNDING
3288 /* Do it with several push insns if that doesn't take lots of insns
3289 and if there is no difficulty with push insns that skip bytes
3290 on the stack for alignment purposes. */
3293 && GET_CODE (size) == CONST_INT
3295 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3296 /* Here we avoid the case of a structure whose weak alignment
3297 forces many pushes of a small amount of data,
3298 and such small pushes do rounding that causes trouble. */
3299 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3300 || align >= BIGGEST_ALIGNMENT
3301 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3302 == (align / BITS_PER_UNIT)))
3303 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3312 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3314 if (current_function_check_memory_usage && ! in_check_memory_usage)
3318 in_check_memory_usage = 1;
3319 temp = get_push_address (INTVAL (size) - used);
3320 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3321 emit_library_call (chkr_copy_bitmap_libfunc,
3322 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3323 Pmode, XEXP (xinner, 0), Pmode,
3324 GEN_INT (INTVAL (size) - used),
3325 TYPE_MODE (sizetype));
3327 emit_library_call (chkr_set_right_libfunc,
3328 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3329 Pmode, GEN_INT (INTVAL (size) - used),
3330 TYPE_MODE (sizetype),
3331 GEN_INT (MEMORY_USE_RW),
3332 TYPE_MODE (integer_type_node));
3333 in_check_memory_usage = 0;
3337 #endif /* PUSH_ROUNDING */
3341 /* Otherwise make space on the stack and copy the data
3342 to the address of that space. */
3344 /* Deduct words put into registers from the size we must copy. */
3347 if (GET_CODE (size) == CONST_INT)
3348 size = GEN_INT (INTVAL (size) - used);
3350 size = expand_binop (GET_MODE (size), sub_optab, size,
3351 GEN_INT (used), NULL_RTX, 0,
3355 /* Get the address of the stack space.
3356 In this case, we do not deal with EXTRA separately.
3357 A single stack adjust will do. */
3360 temp = push_block (size, extra, where_pad == downward);
3363 else if (GET_CODE (args_so_far) == CONST_INT)
3364 temp = memory_address (BLKmode,
3365 plus_constant (args_addr,
3366 skip + INTVAL (args_so_far)));
3368 temp = memory_address (BLKmode,
3369 plus_constant (gen_rtx_PLUS (Pmode,
3373 if (current_function_check_memory_usage && ! in_check_memory_usage)
3375 in_check_memory_usage = 1;
3376 target = copy_to_reg (temp);
3377 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3378 emit_library_call (chkr_copy_bitmap_libfunc,
3379 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3381 XEXP (xinner, 0), Pmode,
3382 size, TYPE_MODE (sizetype));
3384 emit_library_call (chkr_set_right_libfunc,
3385 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3387 size, TYPE_MODE (sizetype),
3388 GEN_INT (MEMORY_USE_RW),
3389 TYPE_MODE (integer_type_node));
3390 in_check_memory_usage = 0;
3393 target = gen_rtx_MEM (BLKmode, temp);
3397 set_mem_attributes (target, type, 1);
3398 /* Function incoming arguments may overlap with sibling call
3399 outgoing arguments and we cannot allow reordering of reads
3400 from function arguments with stores to outgoing arguments
3401 of sibling calls. */
3402 MEM_ALIAS_SET (target) = 0;
3405 /* TEMP is the address of the block. Copy the data there. */
3406 if (GET_CODE (size) == CONST_INT
3407 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3409 move_by_pieces (target, xinner, INTVAL (size), align);
3414 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3415 enum machine_mode mode;
3417 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3419 mode = GET_MODE_WIDER_MODE (mode))
3421 enum insn_code code = movstr_optab[(int) mode];
3422 insn_operand_predicate_fn pred;
3424 if (code != CODE_FOR_nothing
3425 && ((GET_CODE (size) == CONST_INT
3426 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3427 <= (GET_MODE_MASK (mode) >> 1)))
3428 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3429 && (!(pred = insn_data[(int) code].operand[0].predicate)
3430 || ((*pred) (target, BLKmode)))
3431 && (!(pred = insn_data[(int) code].operand[1].predicate)
3432 || ((*pred) (xinner, BLKmode)))
3433 && (!(pred = insn_data[(int) code].operand[3].predicate)
3434 || ((*pred) (opalign, VOIDmode))))
3436 rtx op2 = convert_to_mode (mode, size, 1);
3437 rtx last = get_last_insn ();
3440 pred = insn_data[(int) code].operand[2].predicate;
3441 if (pred != 0 && ! (*pred) (op2, mode))
3442 op2 = copy_to_mode_reg (mode, op2);
3444 pat = GEN_FCN ((int) code) (target, xinner,
3452 delete_insns_since (last);
3457 if (!ACCUMULATE_OUTGOING_ARGS)
3459 /* If the source is referenced relative to the stack pointer,
3460 copy it to another register to stabilize it. We do not need
3461 to do this if we know that we won't be changing sp. */
3463 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3464 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3465 temp = copy_to_reg (temp);
3468 /* Make inhibit_defer_pop nonzero around the library call
3469 to force it to pop the bcopy-arguments right away. */
3471 #ifdef TARGET_MEM_FUNCTIONS
3472 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3473 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3474 convert_to_mode (TYPE_MODE (sizetype),
3475 size, TREE_UNSIGNED (sizetype)),
3476 TYPE_MODE (sizetype));
3478 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3479 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3480 convert_to_mode (TYPE_MODE (integer_type_node),
3482 TREE_UNSIGNED (integer_type_node)),
3483 TYPE_MODE (integer_type_node));
3488 else if (partial > 0)
3490 /* Scalar partly in registers. */
3492 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3495 /* # words of start of argument
3496 that we must make space for but need not store. */
3497 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3498 int args_offset = INTVAL (args_so_far);
3501 /* Push padding now if padding above and stack grows down,
3502 or if padding below and stack grows up.
3503 But if space already allocated, this has already been done. */
3504 if (extra && args_addr == 0
3505 && where_pad != none && where_pad != stack_direction)
3506 anti_adjust_stack (GEN_INT (extra));
3508 /* If we make space by pushing it, we might as well push
3509 the real data. Otherwise, we can leave OFFSET nonzero
3510 and leave the space uninitialized. */
3514 /* Now NOT_STACK gets the number of words that we don't need to
3515 allocate on the stack. */
3516 not_stack = partial - offset;
3518 /* If the partial register-part of the arg counts in its stack size,
3519 skip the part of stack space corresponding to the registers.
3520 Otherwise, start copying to the beginning of the stack space,
3521 by setting SKIP to 0. */
3522 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3524 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3525 x = validize_mem (force_const_mem (mode, x));
3527 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3528 SUBREGs of such registers are not allowed. */
3529 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3530 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3531 x = copy_to_reg (x);
3533 /* Loop over all the words allocated on the stack for this arg. */
3534 /* We can do it by words, because any scalar bigger than a word
3535 has a size a multiple of a word. */
3536 #ifndef PUSH_ARGS_REVERSED
3537 for (i = not_stack; i < size; i++)
3539 for (i = size - 1; i >= not_stack; i--)
3541 if (i >= not_stack + offset)
3542 emit_push_insn (operand_subword_force (x, i, mode),
3543 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3545 GEN_INT (args_offset + ((i - not_stack + skip)
3547 reg_parm_stack_space, alignment_pad);
3552 rtx target = NULL_RTX;
3555 /* Push padding now if padding above and stack grows down,
3556 or if padding below and stack grows up.
3557 But if space already allocated, this has already been done. */
3558 if (extra && args_addr == 0
3559 && where_pad != none && where_pad != stack_direction)
3560 anti_adjust_stack (GEN_INT (extra));
3562 #ifdef PUSH_ROUNDING
3563 if (args_addr == 0 && PUSH_ARGS)
3564 emit_single_push_insn (mode, x, type);
3568 if (GET_CODE (args_so_far) == CONST_INT)
3570 = memory_address (mode,
3571 plus_constant (args_addr,
3572 INTVAL (args_so_far)));
3574 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3577 dest = gen_rtx_MEM (mode, addr);
3580 set_mem_attributes (dest, type, 1);
3581 /* Function incoming arguments may overlap with sibling call
3582 outgoing arguments and we cannot allow reordering of reads
3583 from function arguments with stores to outgoing arguments
3584 of sibling calls. */
3585 MEM_ALIAS_SET (dest) = 0;
3588 emit_move_insn (dest, x);
3592 if (current_function_check_memory_usage && ! in_check_memory_usage)
3594 in_check_memory_usage = 1;
3596 target = get_push_address (GET_MODE_SIZE (mode));
3598 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3599 emit_library_call (chkr_copy_bitmap_libfunc,
3600 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3601 Pmode, XEXP (x, 0), Pmode,
3602 GEN_INT (GET_MODE_SIZE (mode)),
3603 TYPE_MODE (sizetype));
3605 emit_library_call (chkr_set_right_libfunc,
3606 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3607 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3608 TYPE_MODE (sizetype),
3609 GEN_INT (MEMORY_USE_RW),
3610 TYPE_MODE (integer_type_node));
3611 in_check_memory_usage = 0;
3616 /* If part should go in registers, copy that part
3617 into the appropriate registers. Do this now, at the end,
3618 since mem-to-mem copies above may do function calls. */
3619 if (partial > 0 && reg != 0)
3621 /* Handle calls that pass values in multiple non-contiguous locations.
3622 The Irix 6 ABI has examples of this. */
3623 if (GET_CODE (reg) == PARALLEL)
3624 emit_group_load (reg, x, -1, align); /* ??? size? */
3626 move_block_to_reg (REGNO (reg), x, partial, mode);
3629 if (extra && args_addr == 0 && where_pad == stack_direction)
3630 anti_adjust_stack (GEN_INT (extra));
3632 if (alignment_pad && args_addr == 0)
3633 anti_adjust_stack (alignment_pad);
3636 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3644 /* Only registers can be subtargets. */
3645 || GET_CODE (x) != REG
3646 /* If the register is readonly, it can't be set more than once. */
3647 || RTX_UNCHANGING_P (x)
3648 /* Don't use hard regs to avoid extending their life. */
3649 || REGNO (x) < FIRST_PSEUDO_REGISTER
3650 /* Avoid subtargets inside loops,
3651 since they hide some invariant expressions. */
3652 || preserve_subexpressions_p ())
3656 /* Expand an assignment that stores the value of FROM into TO.
3657 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3658 (This may contain a QUEUED rtx;
3659 if the value is constant, this rtx is a constant.)
3660 Otherwise, the returned value is NULL_RTX.
3662 SUGGEST_REG is no longer actually used.
3663 It used to mean, copy the value through a register
3664 and return that register, if that is possible.
3665 We now use WANT_VALUE to decide whether to do this. */
3668 expand_assignment (to, from, want_value, suggest_reg)
3671 int suggest_reg ATTRIBUTE_UNUSED;
3673 register rtx to_rtx = 0;
3676 /* Don't crash if the lhs of the assignment was erroneous. */
3678 if (TREE_CODE (to) == ERROR_MARK)
3680 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3681 return want_value ? result : NULL_RTX;
3684 /* Assignment of a structure component needs special treatment
3685 if the structure component's rtx is not simply a MEM.
3686 Assignment of an array element at a constant index, and assignment of
3687 an array element in an unaligned packed structure field, has the same problem. */
3690 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3691 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3693 enum machine_mode mode1;
3694 HOST_WIDE_INT bitsize, bitpos;
3699 unsigned int alignment;
3702 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3703 &unsignedp, &volatilep, &alignment);
3705 /* If we are going to use store_bit_field and extract_bit_field,
3706 make sure to_rtx will be safe for multiple use. */
3708 if (mode1 == VOIDmode && want_value)
3709 tem = stabilize_reference (tem);
3711 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3714 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3716 if (GET_CODE (to_rtx) != MEM)
3719 if (GET_MODE (offset_rtx) != ptr_mode)
3721 #ifdef POINTERS_EXTEND_UNSIGNED
3722 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3724 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3728 /* A constant address in TO_RTX can have VOIDmode, we must not try
3729 to call force_reg for that case. Avoid that case. */
3730 if (GET_CODE (to_rtx) == MEM
3731 && GET_MODE (to_rtx) == BLKmode
3732 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3734 && (bitpos % bitsize) == 0
3735 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3736 && alignment == GET_MODE_ALIGNMENT (mode1))
3739 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3741 if (GET_CODE (XEXP (temp, 0)) == REG)
3744 to_rtx = (replace_equiv_address
3745 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3750 to_rtx = change_address (to_rtx, VOIDmode,
3751 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3752 force_reg (ptr_mode,
3758 if (GET_CODE (to_rtx) == MEM)
3760 /* When the offset is zero, to_rtx is the address of the
3761 structure we are storing into, and hence may be shared.
3762 We must make a new MEM before setting the volatile bit. */
3764 to_rtx = copy_rtx (to_rtx);
3766 MEM_VOLATILE_P (to_rtx) = 1;
3768 #if 0 /* This was turned off because, when a field is volatile
3769 in an object which is not volatile, the object may be in a register,
3770 and then we would abort over here. */
3776 if (TREE_CODE (to) == COMPONENT_REF
3777 && TREE_READONLY (TREE_OPERAND (to, 1)))
3780 to_rtx = copy_rtx (to_rtx);
3782 RTX_UNCHANGING_P (to_rtx) = 1;
3785 /* Check the access. */
3786 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3791 enum machine_mode best_mode;
3793 best_mode = get_best_mode (bitsize, bitpos,
3794 TYPE_ALIGN (TREE_TYPE (tem)),
3796 if (best_mode == VOIDmode)
3799 best_mode_size = GET_MODE_BITSIZE (best_mode);
3800 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3801 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3802 size *= GET_MODE_SIZE (best_mode);
3804 /* Check the access right of the pointer. */
3805 in_check_memory_usage = 1;
3807 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3808 VOIDmode, 3, to_addr, Pmode,
3809 GEN_INT (size), TYPE_MODE (sizetype),
3810 GEN_INT (MEMORY_USE_WO),
3811 TYPE_MODE (integer_type_node));
3812 in_check_memory_usage = 0;
3815 /* If this is a varying-length object, we must get the address of
3816 the source and do an explicit block move. */
3819 unsigned int from_align;
3820 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3822 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3824 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3825 MIN (alignment, from_align));
3832 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3834 /* Spurious cast for HPUX compiler. */
3835 ? ((enum machine_mode)
3836 TYPE_MODE (TREE_TYPE (to)))
3840 int_size_in_bytes (TREE_TYPE (tem)),
3841 get_alias_set (to));
3843 preserve_temp_slots (result);
3847 /* If the value is meaningful, convert RESULT to the proper mode.
3848 Otherwise, return nothing. */
3849 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3850 TYPE_MODE (TREE_TYPE (from)),
3852 TREE_UNSIGNED (TREE_TYPE (to)))
3857 /* If the rhs is a function call and its value is not an aggregate,
3858 call the function before we start to compute the lhs.
3859 This is needed for correct code for cases such as
3860 val = setjmp (buf) on machines where reference to val
3861 requires loading up part of an address in a separate insn.
3863 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3864 since it might be a promoted variable where the zero- or sign- extension
3865 needs to be done. Handling this in the normal way is safe because no
3866 computation is done before the call. */
3867 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3869 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3870 && GET_CODE (DECL_RTL (to)) == REG))
3875 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3877 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3879 /* Handle calls that return values in multiple non-contiguous locations.
3880 The Irix 6 ABI has examples of this. */
3881 if (GET_CODE (to_rtx) == PARALLEL)
3882 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3883 TYPE_ALIGN (TREE_TYPE (from)));
3884 else if (GET_MODE (to_rtx) == BLKmode)
3885 emit_block_move (to_rtx, value, expr_size (from),
3886 TYPE_ALIGN (TREE_TYPE (from)));
3889 #ifdef POINTERS_EXTEND_UNSIGNED
3890 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3891 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3892 value = convert_memory_address (GET_MODE (to_rtx), value);
3894 emit_move_insn (to_rtx, value);
3896 preserve_temp_slots (to_rtx);
3899 return want_value ? to_rtx : NULL_RTX;
3902 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3903 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3908 if (GET_CODE (to_rtx) == MEM)
3909 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3912 /* Don't move directly into a return register. */
3913 if (TREE_CODE (to) == RESULT_DECL
3914 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3919 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3923 TYPE_ALIGN (TREE_TYPE (from)));
3925 emit_move_insn (to_rtx, temp);
3927 preserve_temp_slots (to_rtx);
3930 return want_value ? to_rtx : NULL_RTX;
3933 /* In case we are returning the contents of an object which overlaps
3934 the place the value is being stored, use a safe function when copying
3935 a value through a pointer into a structure value return block. */
3936 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3937 && current_function_returns_struct
3938 && !current_function_returns_pcc_struct)
3943 size = expr_size (from);
3944 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3945 EXPAND_MEMORY_USE_DONT);
3947 /* Copy the rights of the bitmap. */
3948 if (current_function_check_memory_usage)
3949 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3950 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3951 XEXP (from_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (sizetype),
3953 size, TREE_UNSIGNED (sizetype)),
3954 TYPE_MODE (sizetype));
3956 #ifdef TARGET_MEM_FUNCTIONS
3957 emit_library_call (memmove_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3959 XEXP (from_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (sizetype),
3961 size, TREE_UNSIGNED (sizetype)),
3962 TYPE_MODE (sizetype));
3964 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3966 XEXP (to_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (integer_type_node),
3968 size, TREE_UNSIGNED (integer_type_node)),
3969 TYPE_MODE (integer_type_node));
3972 preserve_temp_slots (to_rtx);
3975 return want_value ? to_rtx : NULL_RTX;
3978 /* Compute FROM and store the value in the rtx we got. */
3981 result = store_expr (from, to_rtx, want_value);
3982 preserve_temp_slots (result);
3985 return want_value ? result : NULL_RTX;
3988 /* Generate code for computing expression EXP,
3989 and storing the value into TARGET.
3990 TARGET may contain a QUEUED rtx.
3992 If WANT_VALUE is nonzero, return a copy of the value
3993 not in TARGET, so that we can be sure to use the proper
3994 value in a containing expression even if TARGET has something
3995 else stored in it. If possible, we copy the value through a pseudo
3996 and return that pseudo. Or, if the value is constant, we try to
3997 return the constant. In some cases, we return a pseudo
3998 copied *from* TARGET.
4000 If the mode is BLKmode then we may return TARGET itself.
4001 It turns out that in BLKmode it doesn't cause a problem,
4002 because C has no operators that could combine two different
4003 assignments into the same BLKmode object with different values
4004 with no sequence point. Will other languages need this to be more careful?
4007 If WANT_VALUE is 0, we return NULL, to make sure
4008 to catch quickly any cases where the caller uses the value
4009 and fails to set WANT_VALUE. */
4012 store_expr (exp, target, want_value)
4014 register rtx target;
4018 int dont_return_target = 0;
4019 int dont_store_target = 0;
4021 if (TREE_CODE (exp) == COMPOUND_EXPR)
4023 /* Perform first part of compound expression, then assign from second part. */
4025 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4027 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4029 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4031 /* For conditional expression, get safe form of the target. Then
4032 test the condition, doing the appropriate assignment on either
4033 side. This avoids the creation of unnecessary temporaries.
4034 For non-BLKmode, it is more efficient not to do this. */
4036 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4039 target = protect_from_queue (target, 1);
4041 do_pending_stack_adjust ();
4043 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 1), target, 0);
4046 end_cleanup_deferral ();
4048 emit_jump_insn (gen_jump (lab2));
4051 start_cleanup_deferral ();
4052 store_expr (TREE_OPERAND (exp, 2), target, 0);
4053 end_cleanup_deferral ();
4058 return want_value ? target : NULL_RTX;
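      /* In outline, the branch above expands to the diamond

           jumpifnot (cond, lab1);
           store_expr (then-arm, target, 0);  goto lab2;
         lab1:
           store_expr (else-arm, target, 0);
         lab2:

         so both arms write TARGET directly and no BLKmode temporary
         is needed.  */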
4060 else if (queued_subexp_p (target))
4061 /* If target contains a postincrement, let's not risk
4062 using it as the place to generate the rhs. */
4064 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4066 /* Expand EXP into a new pseudo. */
4067 temp = gen_reg_rtx (GET_MODE (target));
4068 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4073 /* If target is volatile, ANSI requires accessing the value
4074 *from* the target, if it is accessed. So make that happen.
4075 In no case return the target itself. */
4076 if (! MEM_VOLATILE_P (target) && want_value)
4077 dont_return_target = 1;
4079 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4080 && GET_MODE (target) != BLKmode)
4081 /* If target is in memory and caller wants value in a register instead,
4082 arrange that. Pass TARGET as target for expand_expr so that,
4083 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4084 We know expand_expr will not use the target in that case.
4085 Don't do this if TARGET is volatile because we are supposed
4086 to write it and then read it. */
4088 temp = expand_expr (exp, target, GET_MODE (target), 0);
4089 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4091 /* If TEMP is already in the desired TARGET, only copy it from
4092 memory and don't store it there again. */
4094 || (rtx_equal_p (temp, target)
4095 && ! side_effects_p (temp) && ! side_effects_p (target)))
4096 dont_store_target = 1;
4097 temp = copy_to_reg (temp);
4099 dont_return_target = 1;
4101 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4102 /* If this is a scalar in a register that is stored in a wider mode
4103 than the declared mode, compute the result into its declared mode
4104 and then convert to the wider mode. Our value is the computed expression. */
4107 /* If we don't want a value, we can do the conversion inside EXP,
4108 which will often result in some optimizations. Do the conversion
4109 in two steps: first change the signedness, if needed, then
4110 the extend. But don't do this if the type of EXP is a subtype
4111 of something else since then the conversion might involve
4112 more than just converting modes. */
4113 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4114 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4116 if (TREE_UNSIGNED (TREE_TYPE (exp))
4117 != SUBREG_PROMOTED_UNSIGNED_P (target))
4120 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4124 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4125 SUBREG_PROMOTED_UNSIGNED_P (target)),
4129 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4131 /* If TEMP is a volatile MEM and we want a result value, make
4132 the access now so it gets done only once. Likewise if
4133 it contains TARGET. */
4134 if (GET_CODE (temp) == MEM && want_value
4135 && (MEM_VOLATILE_P (temp)
4136 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4137 temp = copy_to_reg (temp);
4139 /* If TEMP is a VOIDmode constant, use convert_modes to make
4140 sure that we properly convert it. */
4141 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4142 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4143 TYPE_MODE (TREE_TYPE (exp)), temp,
4144 SUBREG_PROMOTED_UNSIGNED_P (target));
4146 convert_move (SUBREG_REG (target), temp,
4147 SUBREG_PROMOTED_UNSIGNED_P (target));
4149 /* If we promoted a constant, change the mode back down to match
4150 target. Otherwise, the caller might get confused by a result whose
4151 mode is larger than expected. */
4153 if (want_value && GET_MODE (temp) != GET_MODE (target)
4154 && GET_MODE (temp) != VOIDmode)
4156 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4157 SUBREG_PROMOTED_VAR_P (temp) = 1;
4158 SUBREG_PROMOTED_UNSIGNED_P (temp)
4159 = SUBREG_PROMOTED_UNSIGNED_P (target);
4162 return want_value ? temp : NULL_RTX;
4166 temp = expand_expr (exp, target, GET_MODE (target), 0);
4167 /* Return TARGET if it's a specified hardware register.
4168 If TARGET is a volatile mem ref, either return TARGET
4169 or return a reg copied *from* TARGET; ANSI requires this.
4171 Otherwise, if TEMP is not TARGET, return TEMP
4172 if it is constant (for efficiency),
4173 or if we really want the correct value. */
4174 if (!(target && GET_CODE (target) == REG
4175 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4176 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4177 && ! rtx_equal_p (temp, target)
4178 && (CONSTANT_P (temp) || want_value))
4179 dont_return_target = 1;
4182 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4183 the same as that of TARGET, adjust the constant. This is needed, for
4184 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4186 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4187 && TREE_CODE (exp) != ERROR_MARK
4188 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4189 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4190 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4192 if (current_function_check_memory_usage
4193 && GET_CODE (target) == MEM
4194 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4196 in_check_memory_usage = 1;
4197 if (GET_CODE (temp) == MEM)
4198 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4199 VOIDmode, 3, XEXP (target, 0), Pmode,
4200 XEXP (temp, 0), Pmode,
4201 expr_size (exp), TYPE_MODE (sizetype));
4203 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4204 VOIDmode, 3, XEXP (target, 0), Pmode,
4205 expr_size (exp), TYPE_MODE (sizetype),
4206 GEN_INT (MEMORY_USE_WO),
4207 TYPE_MODE (integer_type_node));
4208 in_check_memory_usage = 0;
4211 /* If value was not generated in the target, store it there.
4212 Convert the value to TARGET's type first if necessary. */
4213 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4214 one or both of them are volatile memory refs, we have to distinguish two cases:
4216 - expand_expr has used TARGET. In this case, we must not generate
4217 another copy. This can be detected by TARGET being equal according to ==.
4219 - expand_expr has not used TARGET - that means that the source just
4220 happens to have the same RTX form. Since temp will have been created
4221 by expand_expr, it will compare unequal according to == .
4222 We must generate a copy in this case, to reach the correct number
4223 of volatile memory references. */
4225 if ((! rtx_equal_p (temp, target)
4226 || (temp != target && (side_effects_p (temp)
4227 || side_effects_p (target))))
4228 && TREE_CODE (exp) != ERROR_MARK
4229 && ! dont_store_target)
4231 target = protect_from_queue (target, 1);
4232 if (GET_MODE (temp) != GET_MODE (target)
4233 && GET_MODE (temp) != VOIDmode)
4235 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4236 if (dont_return_target)
4238 /* In this case, we will return TEMP,
4239 so make sure it has the proper mode.
4240 But don't forget to store the value into TARGET. */
4241 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4242 emit_move_insn (target, temp);
4245 convert_move (target, temp, unsignedp);
4248 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4250 /* Handle copying a string constant into an array.
4251 The string constant may be shorter than the array.
4252 So copy just the string's actual length, and clear the rest. */
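/* Illustrative sketch of the common case: for
     char buf[8] = "abc";
   expr_size gives 8 while TREE_STRING_LENGTH gives 4 (three
   characters plus the terminating null), so we block-move 4 bytes
   and clear the remaining 4.  */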
4256 /* Get the size of the data type of the string,
4257 which is actually the size of the target. */
4258 size = expr_size (exp);
4259 if (GET_CODE (size) == CONST_INT
4260 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4261 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4264 /* Compute the size of the data to copy from the string. */
4266 = size_binop (MIN_EXPR,
4267 make_tree (sizetype, size),
4268 size_int (TREE_STRING_LENGTH (exp)));
4269 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4270 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4274 /* Copy that much. */
4275 emit_block_move (target, temp, copy_size_rtx,
4276 TYPE_ALIGN (TREE_TYPE (exp)));
4278 /* Figure out how much is left in TARGET that we have to clear.
4279 Do all calculations in ptr_mode. */
4281 addr = XEXP (target, 0);
4282 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4284 if (GET_CODE (copy_size_rtx) == CONST_INT)
4286 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4287 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4288 align = MIN (align,
4289 (unsigned int) (BITS_PER_UNIT
4290 * (INTVAL (copy_size_rtx)
4291 & - INTVAL (copy_size_rtx))));
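/* The idiom INTVAL (copy_size_rtx) & - INTVAL (copy_size_rtx)
   isolates the lowest set bit of the copy size, a conservative
   bound on the alignment of the leftover bytes.  For example, a
   copy of 12 (binary 1100) bytes yields 12 & -12 = 4, so ALIGN is
   capped at 4 bytes' worth of bits for the clearing step.  */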
4295 addr = force_reg (ptr_mode, addr);
4296 addr = expand_binop (ptr_mode, add_optab, addr,
4297 copy_size_rtx, NULL_RTX, 0,
4300 size = expand_binop (ptr_mode, sub_optab, size,
4301 copy_size_rtx, NULL_RTX, 0,
4304 align = BITS_PER_UNIT;
4305 label = gen_label_rtx ();
4306 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4307 GET_MODE (size), 0, 0, label);
4309 align = MIN (align, expr_align (copy_size));
4311 if (size != const0_rtx)
4313 rtx dest = gen_rtx_MEM (BLKmode, addr);
4315 MEM_COPY_ATTRIBUTES (dest, target);
4317 /* Be sure we can write on ADDR. */
4318 in_check_memory_usage = 1;
4319 if (current_function_check_memory_usage)
4320 emit_library_call (chkr_check_addr_libfunc,
4321 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4323 size, TYPE_MODE (sizetype),
4324 GEN_INT (MEMORY_USE_WO),
4325 TYPE_MODE (integer_type_node));
4326 in_check_memory_usage = 0;
4327 clear_storage (dest, size, align);
4334 /* Handle calls that return values in multiple non-contiguous locations.
4335 The Irix 6 ABI has examples of this. */
4336 else if (GET_CODE (target) == PARALLEL)
4337 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4338 TYPE_ALIGN (TREE_TYPE (exp)));
4339 else if (GET_MODE (temp) == BLKmode)
4340 emit_block_move (target, temp, expr_size (exp),
4341 TYPE_ALIGN (TREE_TYPE (exp)));
4343 emit_move_insn (target, temp);
4346 /* If we don't want a value, return NULL_RTX. */
4350 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4351 ??? The latter test doesn't seem to make sense. */
4352 else if (dont_return_target && GET_CODE (temp) != MEM)
4355 /* Return TARGET itself if it is a hard register. */
4356 else if (want_value && GET_MODE (target) != BLKmode
4357 && ! (GET_CODE (target) == REG
4358 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4359 return copy_to_reg (target);
4365 /* Return 1 if EXP just contains zeros. */
4373 switch (TREE_CODE (exp))
4377 case NON_LVALUE_EXPR:
4378 return is_zeros_p (TREE_OPERAND (exp, 0));
4381 return integer_zerop (exp);
4385 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4388 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4391 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4392 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4393 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4394 if (! is_zeros_p (TREE_VALUE (elt)))
4404 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4407 mostly_zeros_p (exp)
4410 if (TREE_CODE (exp) == CONSTRUCTOR)
4412 int elts = 0, zeros = 0;
4413 tree elt = CONSTRUCTOR_ELTS (exp);
4414 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4416 /* If there are no ranges of true bits, it is all zero. */
4417 return elt == NULL_TREE;
4419 for (; elt; elt = TREE_CHAIN (elt))
4421 /* We do not handle the case where the index is a RANGE_EXPR,
4422 so the statistic will be somewhat inaccurate.
4423 We do make a more accurate count in store_constructor itself,
4424 and since this function is only used for nested array elements,
4425 this should be close enough. */
4426 if (mostly_zeros_p (TREE_VALUE (elt)))
4431 return 4 * zeros >= 3 * elts;
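/* Worked instance of the 3/4 test: a constructor with 16 elements
   of which 12 are zero satisfies 4 * 12 >= 3 * 16 (48 >= 48) and
   counts as mostly zero; with only 11 zeros, 44 >= 48 fails.  */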
4434 return is_zeros_p (exp);
4437 /* Helper function for store_constructor.
4438 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4439 TYPE is the type of the CONSTRUCTOR, not the element type.
4440 ALIGN and CLEARED are as for store_constructor.
4441 ALIAS_SET is the alias set to use for any stores.
4443 This provides a recursive shortcut back to store_constructor when it isn't
4444 necessary to go through store_field. This is so that we can pass through
4445 the cleared field to let store_constructor know that we may not have to
4446 clear a substructure if the outer structure has already been cleared. */
4449 store_constructor_field (target, bitsize, bitpos,
4450 mode, exp, type, align, cleared, alias_set)
4452 unsigned HOST_WIDE_INT bitsize;
4453 HOST_WIDE_INT bitpos;
4454 enum machine_mode mode;
4460 if (TREE_CODE (exp) == CONSTRUCTOR
4461 && bitpos % BITS_PER_UNIT == 0
4462 /* If we have a non-zero bitpos for a register target, then we just
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyway. */
4465 && (bitpos == 0 || GET_CODE (target) == MEM))
4469 = adjust_address (target,
4470 GET_MODE (target) == BLKmode
4472 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4473 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4476 /* Show the alignment may no longer be what it was and update the alias
4477 set, if required. */
4479 align = MIN (align, (unsigned int) bitpos & - bitpos);
4480 if (GET_CODE (target) == MEM)
4481 MEM_ALIAS_SET (target) = alias_set;
4483 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4486 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4487 int_size_in_bytes (type), alias_set);
4490 /* Store the value of constructor EXP into the rtx TARGET.
4491 TARGET is either a REG or a MEM.
4492 ALIGN is the maximum known alignment for TARGET.
4493 CLEARED is true if TARGET is known to have been zero'd.
4494 SIZE is the number of bytes of TARGET we are allowed to modify: this
4495 may not be the same as the size of EXP if we are assigning to a field
4496 which has been packed to exclude padding bits. */
4499 store_constructor (exp, target, align, cleared, size)
4506 tree type = TREE_TYPE (exp);
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4511 /* We know our target cannot conflict, since safe_from_p has been called. */
4513 /* Don't try copying piece by piece into a hard register
4514 since that is vulnerable to being clobbered by EXP.
4515 Instead, construct in a pseudo register and then copy it all. */
4516 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4518 rtx temp = gen_reg_rtx (GET_MODE (target));
4519 store_constructor (exp, temp, align, cleared, size);
4520 emit_move_insn (target, temp);
4525 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4526 || TREE_CODE (type) == QUAL_UNION_TYPE)
4530 /* Inform later passes that the whole union value is dead. */
4531 if ((TREE_CODE (type) == UNION_TYPE
4532 || TREE_CODE (type) == QUAL_UNION_TYPE)
4535 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4537 /* If the constructor is empty, clear the union. */
4538 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4539 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4542 /* If we are building a static constructor into a register,
4543 set the initial value as zero so we can fold the value into
4544 a constant. But if more than one register is involved,
4545 this probably loses. */
4546 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4547 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4561 && ((list_length (CONSTRUCTOR_ELTS (exp))
4562 != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4568 clear_storage (target, GEN_INT (size), align);
4573 /* Inform later passes that the old value is dead. */
4574 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4576 /* Store each element of the constructor into
4577 the corresponding field of TARGET. */
4579 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4581 register tree field = TREE_PURPOSE (elt);
4582 #ifdef WORD_REGISTER_OPERATIONS
4583 tree value = TREE_VALUE (elt);
4585 register enum machine_mode mode;
4586 HOST_WIDE_INT bitsize;
4587 HOST_WIDE_INT bitpos = 0;
4590 rtx to_rtx = target;
4592 /* Just ignore missing fields.
4593 We cleared the whole structure, above,
4594 if any fields are missing. */
4598 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4601 if (host_integerp (DECL_SIZE (field), 1))
4602 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4606 unsignedp = TREE_UNSIGNED (field);
4607 mode = DECL_MODE (field);
4608 if (DECL_BIT_FIELD (field))
4611 offset = DECL_FIELD_OFFSET (field);
4612 if (host_integerp (offset, 0)
4613 && host_integerp (bit_position (field), 0))
4615 bitpos = int_bit_position (field);
4619 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4625 if (contains_placeholder_p (offset))
4626 offset = build (WITH_RECORD_EXPR, sizetype,
4627 offset, make_tree (TREE_TYPE (exp), target));
4629 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4630 if (GET_CODE (to_rtx) != MEM)
4633 if (GET_MODE (offset_rtx) != ptr_mode)
4635 #ifdef POINTERS_EXTEND_UNSIGNED
4636 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4638 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4643 = change_address (to_rtx, VOIDmode,
4644 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4645 force_reg (ptr_mode,
4647 align = DECL_OFFSET_ALIGN (field);
4650 if (TREE_READONLY (field))
4652 if (GET_CODE (to_rtx) == MEM)
4653 to_rtx = copy_rtx (to_rtx);
4655 RTX_UNCHANGING_P (to_rtx) = 1;
4658 #ifdef WORD_REGISTER_OPERATIONS
4659 /* If this initializes a field that is smaller than a word, at the
4660 start of a word, try to widen it to a full word.
4661 This special case allows us to output C++ member function
4662 initializations in a form that the optimizers can understand. */
4663 if (GET_CODE (target) == REG
4664 && bitsize < BITS_PER_WORD
4665 && bitpos % BITS_PER_WORD == 0
4666 && GET_MODE_CLASS (mode) == MODE_INT
4667 && TREE_CODE (value) == INTEGER_CST
4669 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4671 tree type = TREE_TYPE (value);
4672 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4674 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4675 value = convert (type, value);
4677 if (BYTES_BIG_ENDIAN)
4679 = fold (build (LSHIFT_EXPR, type, value,
4680 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4681 bitsize = BITS_PER_WORD;
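/* Sketch of the effect, assuming 32-bit words: storing the
   constant 5 into an 8-bit field at the start of a word becomes a
   full-word store of 5 << 24 on a big-endian target (the shift
   built just above), and of the value widened to 32 bits on a
   little-endian one.  */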
4685 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4686 TREE_VALUE (elt), type, align, cleared,
4687 (DECL_NONADDRESSABLE_P (field)
4688 && GET_CODE (to_rtx) == MEM)
4689 ? MEM_ALIAS_SET (to_rtx)
4690 : get_alias_set (TREE_TYPE (field)));
4693 else if (TREE_CODE (type) == ARRAY_TYPE)
4698 tree domain = TYPE_DOMAIN (type);
4699 tree elttype = TREE_TYPE (type);
4700 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4701 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4702 HOST_WIDE_INT minelt = 0;
4703 HOST_WIDE_INT maxelt = 0;
4705 /* If we have constant bounds for the range of the type, get them. */
4708 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4709 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4712 /* If the constructor has fewer elements than the array,
4713 clear the whole array first. Similarly if this is
4714 a static constructor of a non-BLKmode object. */
4715 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4719 HOST_WIDE_INT count = 0, zero_count = 0;
4720 need_to_clear = ! const_bounds_p;
4722 /* This loop is a more accurate version of the loop in
4723 mostly_zeros_p (it handles RANGE_EXPR in an index).
4724 It is also needed to check for missing elements. */
4725 for (elt = CONSTRUCTOR_ELTS (exp);
4726 elt != NULL_TREE && ! need_to_clear;
4727 elt = TREE_CHAIN (elt))
4729 tree index = TREE_PURPOSE (elt);
4730 HOST_WIDE_INT this_node_count;
4732 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4734 tree lo_index = TREE_OPERAND (index, 0);
4735 tree hi_index = TREE_OPERAND (index, 1);
4737 if (! host_integerp (lo_index, 1)
4738 || ! host_integerp (hi_index, 1))
4744 this_node_count = (tree_low_cst (hi_index, 1)
4745 - tree_low_cst (lo_index, 1) + 1);
4748 this_node_count = 1;
4750 count += this_node_count;
4751 if (mostly_zeros_p (TREE_VALUE (elt)))
4752 zero_count += this_node_count;
4755 /* Clear the entire array first if there are any missing elements,
4756 or if the incidence of zero elements is >= 75%. */
4757 if (! need_to_clear
4758 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4759 need_to_clear = 1;
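/* Example of the heuristic, using a GNU C designated initializer
   as a sketch: for
     int a[100] = { [3] = 7 };
   count is 1 while maxelt - minelt + 1 is 100, so the whole array
   is cleared first and only element 3 is stored explicitly.  */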
4762 if (need_to_clear && size > 0)
4765 clear_storage (target, GEN_INT (size), align);
4769 /* Inform later passes that the old value is dead. */
4770 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4772 /* Store each element of the constructor into
4773 the corresponding element of TARGET, determined
4774 by counting the elements. */
4775 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4777 elt = TREE_CHAIN (elt), i++)
4779 register enum machine_mode mode;
4780 HOST_WIDE_INT bitsize;
4781 HOST_WIDE_INT bitpos;
4783 tree value = TREE_VALUE (elt);
4784 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4785 tree index = TREE_PURPOSE (elt);
4786 rtx xtarget = target;
4788 if (cleared && is_zeros_p (value))
4791 unsignedp = TREE_UNSIGNED (elttype);
4792 mode = TYPE_MODE (elttype);
4793 if (mode == BLKmode)
4794 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4795 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4798 bitsize = GET_MODE_BITSIZE (mode);
4800 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4802 tree lo_index = TREE_OPERAND (index, 0);
4803 tree hi_index = TREE_OPERAND (index, 1);
4804 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4805 struct nesting *loop;
4806 HOST_WIDE_INT lo, hi, count;
4809 /* If the range is constant and "small", unroll the loop. */
4811 && host_integerp (lo_index, 0)
4812 && host_integerp (hi_index, 0)
4813 && (lo = tree_low_cst (lo_index, 0),
4814 hi = tree_low_cst (hi_index, 0),
4815 count = hi - lo + 1,
4816 (GET_CODE (target) != MEM
4818 || (host_integerp (TYPE_SIZE (elttype), 1)
4819 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4822 lo -= minelt; hi -= minelt;
4823 for (; lo <= hi; lo++)
4825 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4826 store_constructor_field
4827 (target, bitsize, bitpos, mode, value, type, align,
4829 TYPE_NONALIASED_COMPONENT (type)
4830 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4835 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4836 loop_top = gen_label_rtx ();
4837 loop_end = gen_label_rtx ();
4839 unsignedp = TREE_UNSIGNED (domain);
4841 index = build_decl (VAR_DECL, NULL_TREE, domain);
4844 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4846 SET_DECL_RTL (index, index_r);
4847 if (TREE_CODE (value) == SAVE_EXPR
4848 && SAVE_EXPR_RTL (value) == 0)
4850 /* Make sure value gets expanded once before the
4851 loop. */
4852 expand_expr (value, const0_rtx, VOIDmode, 0);
4855 store_expr (lo_index, index_r, 0);
4856 loop = expand_start_loop (0);
4858 /* Assign value to element index. */
4860 = convert (ssizetype,
4861 fold (build (MINUS_EXPR, TREE_TYPE (index),
4862 index, TYPE_MIN_VALUE (domain))));
4863 position = size_binop (MULT_EXPR, position,
4865 TYPE_SIZE_UNIT (elttype)));
4867 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4868 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4869 xtarget = change_address (target, mode, addr);
4870 if (TREE_CODE (value) == CONSTRUCTOR)
4871 store_constructor (value, xtarget, align, cleared,
4872 bitsize / BITS_PER_UNIT);
4874 store_expr (value, xtarget, 0);
4876 expand_exit_loop_if_false (loop,
4877 build (LT_EXPR, integer_type_node,
4880 expand_increment (build (PREINCREMENT_EXPR,
4882 index, integer_one_node), 0, 0);
4884 emit_label (loop_end);
4887 else if ((index != 0 && ! host_integerp (index, 0))
4888 || ! host_integerp (TYPE_SIZE (elttype), 1))
4894 index = ssize_int (1);
4897 index = convert (ssizetype,
4898 fold (build (MINUS_EXPR, index,
4899 TYPE_MIN_VALUE (domain))));
4901 position = size_binop (MULT_EXPR, index,
4903 TYPE_SIZE_UNIT (elttype)));
4904 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4905 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4906 xtarget = change_address (target, mode, addr);
4907 store_expr (value, xtarget, 0);
4912 bitpos = ((tree_low_cst (index, 0) - minelt)
4913 * tree_low_cst (TYPE_SIZE (elttype), 1));
4915 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4917 store_constructor_field (target, bitsize, bitpos, mode, value,
4918 type, align, cleared,
4919 TYPE_NONALIASED_COMPONENT (type)
4920 && GET_CODE (target) == MEM
4921 ? MEM_ALIAS_SET (target) :
4922 get_alias_set (elttype));
4928 /* Set constructor assignments. */
4929 else if (TREE_CODE (type) == SET_TYPE)
4931 tree elt = CONSTRUCTOR_ELTS (exp);
4932 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4933 tree domain = TYPE_DOMAIN (type);
4934 tree domain_min, domain_max, bitlength;
4936 /* The default implementation strategy is to extract the constant
4937 parts of the constructor, use that to initialize the target,
4938 and then "or" in whatever non-constant ranges we need in addition.
4940 If a large set is all zero or all ones, it is
4941 probably better to set it using memset (if available) or bzero.
4942 Also, if a large set has just a single range, it may also be
4943 better to first clear the whole set (using
4944 bzero/memset), and then set the bits we want. */
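/* Sketch with a hypothetical 64-bit set: for a Pascal- or
   CHILL-style constructor like [2, 5, 60..63] every bound is
   constant, so the bits are assembled into words and stored
   directly; a range with non-constant bounds, say [lo..hi], is
   instead handled by the __setbits library call further below.  */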
4946 /* Check for all zeros. */
4947 if (elt == NULL_TREE && size > 0)
4950 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4954 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4955 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4956 bitlength = size_binop (PLUS_EXPR,
4957 size_diffop (domain_max, domain_min),
4960 nbits = tree_low_cst (bitlength, 1);
4962 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4963 are "complicated" (more than one range), initialize (the
4964 constant parts) by copying from a constant. */
4965 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4966 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4968 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4969 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4970 char *bit_buffer = (char *) alloca (nbits);
4971 HOST_WIDE_INT word = 0;
4972 unsigned int bit_pos = 0;
4973 unsigned int ibit = 0;
4974 unsigned int offset = 0; /* In bytes from beginning of set. */
4976 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4979 if (bit_buffer[ibit])
4981 if (BYTES_BIG_ENDIAN)
4982 word |= (1 << (set_word_size - 1 - bit_pos));
4984 word |= 1 << bit_pos;
4988 if (bit_pos >= set_word_size || ibit == nbits)
4990 if (word != 0 || ! cleared)
4992 rtx datum = GEN_INT (word);
4995 /* The assumption here is that it is safe to use
4996 XEXP if the set is multi-word, but not if
4997 it's single-word. */
4998 if (GET_CODE (target) == MEM)
4999 to_rtx = adjust_address (target, mode, offset);
5000 else if (offset == 0)
5004 emit_move_insn (to_rtx, datum);
5011 offset += set_word_size / BITS_PER_UNIT;
5016 /* Don't bother clearing storage if the set is all ones. */
5017 if (TREE_CHAIN (elt) != NULL_TREE
5018 || (TREE_PURPOSE (elt) == NULL_TREE
5020 : ( ! host_integerp (TREE_VALUE (elt), 0)
5021 || ! host_integerp (TREE_PURPOSE (elt), 0)
5022 || (tree_low_cst (TREE_VALUE (elt), 0)
5023 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5024 != (HOST_WIDE_INT) nbits))))
5025 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5027 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5029 /* Start of range of element or NULL. */
5030 tree startbit = TREE_PURPOSE (elt);
5031 /* End of range of element, or element value. */
5032 tree endbit = TREE_VALUE (elt);
5033 #ifdef TARGET_MEM_FUNCTIONS
5034 HOST_WIDE_INT startb, endb;
5036 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5038 bitlength_rtx = expand_expr (bitlength,
5039 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5041 /* Handle non-range tuple element like [ expr ]. */
5042 if (startbit == NULL_TREE)
5044 startbit = save_expr (endbit);
5048 startbit = convert (sizetype, startbit);
5049 endbit = convert (sizetype, endbit);
5050 if (! integer_zerop (domain_min))
5052 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5053 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5055 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5056 EXPAND_CONST_ADDRESS);
5057 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5064 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5067 emit_move_insn (targetx, target);
5070 else if (GET_CODE (target) == MEM)
5075 #ifdef TARGET_MEM_FUNCTIONS
5076 /* Optimization: If startbit and endbit are
5077 constants divisible by BITS_PER_UNIT,
5078 call memset instead. */
5079 if (TREE_CODE (startbit) == INTEGER_CST
5080 && TREE_CODE (endbit) == INTEGER_CST
5081 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5082 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5084 emit_library_call (memset_libfunc, LCT_NORMAL,
5086 plus_constant (XEXP (targetx, 0),
5087 startb / BITS_PER_UNIT),
5089 constm1_rtx, TYPE_MODE (integer_type_node),
5090 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5091 TYPE_MODE (sizetype));
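/* Worked example of the memset path, assuming 8-bit bytes: with
   startbit = 8 and endbit = 23 we get startb = 8 and endb = 24,
   both multiples of BITS_PER_UNIT, so this becomes
     memset (addr + 1, -1, 2);
   setting the two whole bytes that make up the range.  */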
5095 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5096 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5097 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5098 startbit_rtx, TYPE_MODE (sizetype),
5099 endbit_rtx, TYPE_MODE (sizetype));
5102 emit_move_insn (target, targetx);
5110 /* Store the value of EXP (an expression tree)
5111 into a subfield of TARGET which has mode MODE and occupies
5112 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5113 If MODE is VOIDmode, it means that we are storing into a bit-field.
5115 If VALUE_MODE is VOIDmode, return nothing in particular.
5116 UNSIGNEDP is not used in this case.
5118 Otherwise, return an rtx for the value stored. This rtx
5119 has mode VALUE_MODE if that is convenient to do.
5120 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5122 ALIGN is the alignment that TARGET is known to have.
5123 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5125 ALIAS_SET is the alias set for the destination. This value will
5126 (in general) be different from that for TARGET, since TARGET is a
5127 reference to the containing structure. */
5130 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5131 unsignedp, align, total_size, alias_set)
5133 HOST_WIDE_INT bitsize;
5134 HOST_WIDE_INT bitpos;
5135 enum machine_mode mode;
5137 enum machine_mode value_mode;
5140 HOST_WIDE_INT total_size;
5143 HOST_WIDE_INT width_mask = 0;
5145 if (TREE_CODE (exp) == ERROR_MARK)
5148 /* If we have nothing to store, do nothing unless the expression has
5149 side-effects. */
5150 if (bitsize == 0)
5151 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5153 if (bitsize < HOST_BITS_PER_WIDE_INT)
5154 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5156 /* If we are storing into an unaligned field of an aligned union that is
5157 in a register, we may have the mode of TARGET being an integer mode but
5158 MODE == BLKmode. In that case, get an aligned object whose size and
5159 alignment are the same as TARGET and store TARGET into it (we can avoid
5160 the store if the field being stored is the entire width of TARGET). Then
5161 call ourselves recursively to store the field into a BLKmode version of
5162 that object. Finally, load from the object into TARGET. This is not
5163 very efficient in general, but should only be slightly more expensive
5164 than the otherwise-required unaligned accesses. Perhaps this can be
5165 cleaned up later. */
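/* Sketch of the round trip described above: OBJECT is a fresh
   stack temporary in TARGET's integer mode and BLK_OBJECT is the
   same rtx re-moded to BLKmode.  We copy TARGET into OBJECT
   (unless the field covers every bit), recurse on BLK_OBJECT,
   which is now an addressable BLKmode object, and finally copy
   OBJECT back into TARGET.  */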
5168 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5172 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5175 rtx blk_object = copy_rtx (object);
5177 PUT_MODE (blk_object, BLKmode);
5179 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5180 emit_move_insn (object, target);
5182 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5183 align, total_size, alias_set);
5185 /* Even though we aren't returning target, we need to
5186 give it the updated value. */
5187 emit_move_insn (target, object);
5192 if (GET_CODE (target) == CONCAT)
5194 /* We're storing into a struct containing a single __complex. */
5198 return store_expr (exp, target, 0);
5201 /* If the structure is in a register or if the component
5202 is a bit field, we cannot use addressing to access it.
5203 Use bit-field techniques or SUBREG to store in it. */
5205 if (mode == VOIDmode
5206 || (mode != BLKmode && ! direct_store[(int) mode]
5207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5209 || GET_CODE (target) == REG
5210 || GET_CODE (target) == SUBREG
5211 /* If the field isn't aligned enough to store as an ordinary memref,
5212 store it as a bit field. */
5213 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5214 && (align < GET_MODE_ALIGNMENT (mode)
5215 || bitpos % GET_MODE_ALIGNMENT (mode)))
5216 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5217 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5218 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5219 /* If the RHS and field are a constant size and the size of the
5220 RHS isn't the same size as the bitfield, we must use bitfield
5221 techniques. */
5222 || (bitsize >= 0
5223 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5224 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5226 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5228 /* If BITSIZE is narrower than the size of the type of EXP
5229 we will be narrowing TEMP. Normally, what's wanted are the
5230 low-order bits. However, if EXP's type is a record and this is
5231 a big-endian machine, we want the upper BITSIZE bits. */
5232 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5233 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5234 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5235 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5236 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5240 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5241 MODE. */
5242 if (mode != VOIDmode && mode != BLKmode
5243 && mode != TYPE_MODE (TREE_TYPE (exp)))
5244 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5246 /* If the modes of TARGET and TEMP are both BLKmode, both
5247 must be in memory and BITPOS must be aligned on a byte
5248 boundary. If so, we simply do a block copy. */
5249 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5251 unsigned int exp_align = expr_align (exp);
5253 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5254 || bitpos % BITS_PER_UNIT != 0)
5257 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5259 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5260 align = MIN (exp_align, align);
5262 /* Find an alignment that is consistent with the bit position. */
5263 while ((bitpos % align) != 0)
5266 emit_block_move (target, temp,
5267 bitsize == -1 ? expr_size (exp)
5268 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5272 return value_mode == VOIDmode ? const0_rtx : target;
5275 /* Store the value in the bitfield. */
5276 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5277 if (value_mode != VOIDmode)
5279 /* The caller wants an rtx for the value. */
5280 /* If possible, avoid refetching from the bitfield itself. */
5282 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5285 enum machine_mode tmode;
5288 return expand_and (temp,
5292 GET_MODE (temp) == VOIDmode
5294 : GET_MODE (temp))), NULL_RTX);
5295 tmode = GET_MODE (temp);
5296 if (tmode == VOIDmode)
5298 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5299 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5300 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
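/* For example, assuming a 32-bit TMODE: for an 8-bit signed
   bitfield, COUNT is 24, so the value is shifted left 24 bits and
   then arithmetically right 24 bits, which sign-extends the low
   8 bits without refetching from the bitfield.  */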
5302 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5303 NULL_RTX, value_mode, 0, align,
5310 rtx addr = XEXP (target, 0);
5313 /* If a value is wanted, it must be the lhs;
5314 so make the address stable for multiple use. */
5316 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5317 && ! CONSTANT_ADDRESS_P (addr)
5318 /* A frame-pointer reference is already stable. */
5319 && ! (GET_CODE (addr) == PLUS
5320 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5321 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5322 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5323 target = replace_equiv_address (target, copy_to_reg (addr));
5325 /* Now build a reference to just the desired component. */
5327 to_rtx = copy_rtx (adjust_address (target, mode,
5328 bitpos / BITS_PER_UNIT));
5330 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5331 /* If the address of the structure varies, then it might be on
5332 the stack. And, stack slots may be shared across scopes.
5333 So, two different structures, of different types, can end up
5334 at the same location. We will give the structures alias set
5335 zero; here we must be careful not to give non-zero alias sets
5336 to their pieces. */
5337 if (!rtx_varies_p (addr, /*for_alias=*/0))
5338 MEM_ALIAS_SET (to_rtx) = alias_set;
5340 MEM_ALIAS_SET (to_rtx) = 0;
5342 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5346 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5347 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5348 codes and find the ultimate containing object, which we return.
5350 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5351 bit position, and *PUNSIGNEDP to the signedness of the field.
5352 If the position of the field is variable, we store a tree
5353 giving the variable offset (in units) in *POFFSET.
5354 This offset is in addition to the bit position.
5355 If the position is not variable, we store 0 in *POFFSET.
5356 We set *PALIGNMENT to the alignment of the address that will be
5357 computed. This is the alignment of the thing we return if *POFFSET
5358 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5360 If any of the extraction expressions is volatile,
5361 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5363 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5364 is a mode that can be used to access the field. In that case, *PBITSIZE
5365 is redundant.
5367 If the field describes a variable-sized object, *PMODE is set to
5368 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5369 this case, but the address of the object can be found. */
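/* Illustrative call (a sketch): given
     struct s { int pad : 3; int f : 5; } x;
   applying this to the COMPONENT_REF x.f returns the reference to
   x itself with *PBITSIZE = 5, *PBITPOS = 3, *POFFSET = 0 and
   *PMODE = VOIDmode, since f is a bit-field.  */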
5372 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5373 punsignedp, pvolatilep, palignment)
5375 HOST_WIDE_INT *pbitsize;
5376 HOST_WIDE_INT *pbitpos;
5378 enum machine_mode *pmode;
5381 unsigned int *palignment;
5384 enum machine_mode mode = VOIDmode;
5385 tree offset = size_zero_node;
5386 tree bit_offset = bitsize_zero_node;
5387 unsigned int alignment = BIGGEST_ALIGNMENT;
5390 /* First get the mode, signedness, and size. We do this from just the
5391 outermost expression. */
5392 if (TREE_CODE (exp) == COMPONENT_REF)
5394 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5395 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5396 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5398 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5400 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5402 size_tree = TREE_OPERAND (exp, 1);
5403 *punsignedp = TREE_UNSIGNED (exp);
5407 mode = TYPE_MODE (TREE_TYPE (exp));
5408 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5410 if (mode == BLKmode)
5411 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5413 *pbitsize = GET_MODE_BITSIZE (mode);
5418 if (! host_integerp (size_tree, 1))
5419 mode = BLKmode, *pbitsize = -1;
5421 *pbitsize = tree_low_cst (size_tree, 1);
5424 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5425 and find the ultimate containing object. */
5428 if (TREE_CODE (exp) == BIT_FIELD_REF)
5429 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5430 else if (TREE_CODE (exp) == COMPONENT_REF)
5432 tree field = TREE_OPERAND (exp, 1);
5433 tree this_offset = DECL_FIELD_OFFSET (field);
5435 /* If this field hasn't been filled in yet, don't go
5436 past it. This should only happen when folding expressions
5437 made during type construction. */
5438 if (this_offset == 0)
5440 else if (! TREE_CONSTANT (this_offset)
5441 && contains_placeholder_p (this_offset))
5442 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5444 offset = size_binop (PLUS_EXPR, offset, this_offset);
5445 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5446 DECL_FIELD_BIT_OFFSET (field));
5448 if (! host_integerp (offset, 0))
5449 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5452 else if (TREE_CODE (exp) == ARRAY_REF
5453 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5455 tree index = TREE_OPERAND (exp, 1);
5456 tree array = TREE_OPERAND (exp, 0);
5457 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5458 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5459 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5461 /* We assume all arrays have sizes that are a multiple of a byte.
5462 First subtract the lower bound, if any, in the type of the
5463 index, then convert to sizetype and multiply by the size of the
5464 array element. */
5465 if (low_bound != 0 && ! integer_zerop (low_bound))
5466 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5469 /* If the index has a self-referential type, pass it to a
5470 WITH_RECORD_EXPR; if the component size does too, pass our
5471 component to one. */
5472 if (! TREE_CONSTANT (index)
5473 && contains_placeholder_p (index))
5474 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5475 if (! TREE_CONSTANT (unit_size)
5476 && contains_placeholder_p (unit_size))
5477 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5479 offset = size_binop (PLUS_EXPR, offset,
5480 size_binop (MULT_EXPR,
5481 convert (sizetype, index),
5485 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5486 && ! ((TREE_CODE (exp) == NOP_EXPR
5487 || TREE_CODE (exp) == CONVERT_EXPR)
5488 && (TYPE_MODE (TREE_TYPE (exp))
5489 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5492 /* If any reference in the chain is volatile, the effect is volatile. */
5493 if (TREE_THIS_VOLATILE (exp))
5496 /* If the offset is non-constant already, then we can't assume any
5497 alignment more than the alignment here. */
5498 if (! TREE_CONSTANT (offset))
5499 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5501 exp = TREE_OPERAND (exp, 0);
5505 alignment = MIN (alignment, DECL_ALIGN (exp));
5506 else if (TREE_TYPE (exp) != 0)
5507 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5509 /* If OFFSET is constant, see if we can return the whole thing as a
5510 constant bit position. Otherwise, split it up. */
5511 if (host_integerp (offset, 0)
5512 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5514 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5515 && host_integerp (tem, 0))
5516 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5518 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5521 *palignment = alignment;
5525 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5527 static enum memory_use_mode
5528 get_memory_usage_from_modifier (modifier)
5529 enum expand_modifier modifier;
5535 return MEMORY_USE_RO;
5537 case EXPAND_MEMORY_USE_WO:
5538 return MEMORY_USE_WO;
5540 case EXPAND_MEMORY_USE_RW:
5541 return MEMORY_USE_RW;
5543 case EXPAND_MEMORY_USE_DONT:
5544 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5545 MEMORY_USE_DONT, because they are modifiers to a call of
5546 expand_expr in the ADDR_EXPR case of expand_expr. */
5547 case EXPAND_CONST_ADDRESS:
5548 case EXPAND_INITIALIZER:
5549 return MEMORY_USE_DONT;
5550 case EXPAND_MEMORY_USE_BAD:
5556 /* Given an rtx VALUE that may contain additions and multiplications, return
5557 an equivalent value that just refers to a register, memory, or constant.
5558 This is done by generating instructions to perform the arithmetic and
5559 returning a pseudo-register containing the value.
5561 The returned value may be a REG, SUBREG, MEM or constant. */
5564 force_operand (value, target)
5567 register optab binoptab = 0;
5568 /* Use a temporary to force order of execution of calls to
5569 `force_operand'. */
5572 /* Use subtarget as the target for operand 0 of a binary operation. */
5573 register rtx subtarget = get_subtarget (target);
5575 /* Check for a PIC address load. */
5577 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5578 && XEXP (value, 0) == pic_offset_table_rtx
5579 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5580 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5581 || GET_CODE (XEXP (value, 1)) == CONST))
5584 subtarget = gen_reg_rtx (GET_MODE (value));
5585 emit_move_insn (subtarget, value);
5589 if (GET_CODE (value) == PLUS)
5590 binoptab = add_optab;
5591 else if (GET_CODE (value) == MINUS)
5592 binoptab = sub_optab;
5593 else if (GET_CODE (value) == MULT)
5595 op2 = XEXP (value, 1);
5596 if (!CONSTANT_P (op2)
5597 && !(GET_CODE (op2) == REG && op2 != subtarget))
5599 tmp = force_operand (XEXP (value, 0), subtarget);
5600 return expand_mult (GET_MODE (value), tmp,
5601 force_operand (op2, NULL_RTX),
5607 op2 = XEXP (value, 1);
5608 if (!CONSTANT_P (op2)
5609 && !(GET_CODE (op2) == REG && op2 != subtarget))
5611 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5613 binoptab = add_optab;
5614 op2 = negate_rtx (GET_MODE (value), op2);
5617 /* Check for an addition with OP2 a constant integer and our first
5618 operand a PLUS of a virtual register and something else. In that
5619 case, we want to emit the sum of the virtual register and the
5620 constant first and then add the other value. This allows virtual
5621 register instantiation to simply modify the constant rather than
5622 creating another one around this addition. */
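/* For example (a sketch): given
     (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 8))
   we emit virtual-stack-vars + 8 first, so that virtual register
   instantiation can fold the 8 into the eventual frame offset, and
   only then add R.  */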
5623 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5624 && GET_CODE (XEXP (value, 0)) == PLUS
5625 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5626 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5627 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5629 rtx temp = expand_binop (GET_MODE (value), binoptab,
5630 XEXP (XEXP (value, 0), 0), op2,
5631 subtarget, 0, OPTAB_LIB_WIDEN);
5632 return expand_binop (GET_MODE (value), binoptab, temp,
5633 force_operand (XEXP (XEXP (value, 0), 1), 0),
5634 target, 0, OPTAB_LIB_WIDEN);
5637 tmp = force_operand (XEXP (value, 0), subtarget);
5638 return expand_binop (GET_MODE (value), binoptab, tmp,
5639 force_operand (op2, NULL_RTX),
5640 target, 0, OPTAB_LIB_WIDEN);
5641 /* We give UNSIGNEDP = 0 to expand_binop
5642 because the only operations we are expanding here are signed ones. */
5647 /* Subroutine of expand_expr:
5648 save the non-copied parts (LIST) of an expr (LHS), and return a list
5649 which can restore these values to their previous values,
5650 should something modify their storage. */
5653 save_noncopied_parts (lhs, list)
5660 for (tail = list; tail; tail = TREE_CHAIN (tail))
5661 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5662 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5665 tree part = TREE_VALUE (tail);
5666 tree part_type = TREE_TYPE (part);
5667 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5669 = assign_temp (build_qualified_type (part_type,
5670 (TYPE_QUALS (part_type)
5671 | TYPE_QUAL_CONST)),
5674 parts = tree_cons (to_be_saved,
5675 build (RTL_EXPR, part_type, NULL_TREE,
5676 (tree) validize_mem (target)),
5678 store_expr (TREE_PURPOSE (parts),
5679 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5684 /* Subroutine of expand_expr:
5685 record the non-copied parts (LIST) of an expr (LHS), and return a list
5686 which specifies the initial values of these parts. */
5689 init_noncopied_parts (lhs, list)
5696 for (tail = list; tail; tail = TREE_CHAIN (tail))
5697 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5698 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5699 else if (TREE_PURPOSE (tail))
5701 tree part = TREE_VALUE (tail);
5702 tree part_type = TREE_TYPE (part);
5703 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5704 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5709 /* Subroutine of expand_expr: return nonzero iff there is no way that
5710 EXP can reference X, which is being modified. TOP_P is nonzero if this
5711 call is going to be used to determine whether we need a temporary
5712 for EXP, as opposed to a recursive call to this function.
5714 It is always safe for this routine to return zero since it merely
5715 searches for optimization opportunities. */
5718 safe_from_p (x, exp, top_p)
5725 static tree save_expr_list;
5728 /* If EXP has varying size, we MUST use a target since we currently
5729 have no way of allocating temporaries of variable size
5730 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5731 So we assume here that something at a higher level has prevented a
5732 clash. This is somewhat bogus, but the best we can do. Only
5733 do this when X is BLKmode and when we are at the top level. */
5734 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5735 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5736 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5737 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5738 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5740 && GET_MODE (x) == BLKmode)
5741 /* If X is in the outgoing argument area, it is always safe. */
5742 || (GET_CODE (x) == MEM
5743 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5744 || (GET_CODE (XEXP (x, 0)) == PLUS
5745 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5748 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5749 find the underlying pseudo. */
5750 if (GET_CODE (x) == SUBREG)
5753 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5757 /* A SAVE_EXPR might appear many times in the expression passed to the
5758 top-level safe_from_p call, and if it has a complex subexpression,
5759 examining it multiple times could result in a combinatorial explosion.
5760 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5761 with optimization took about 28 minutes to compile -- even though it was
5762 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5763 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5764 we have processed. Note that the only test of top_p was above. */
5773 rtn = safe_from_p (x, exp, 0);
5775 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5776 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5781 /* Now look at our tree code and possibly recurse. */
5782 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5785 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5792 if (TREE_CODE (exp) == TREE_LIST)
5793 return ((TREE_VALUE (exp) == 0
5794 || safe_from_p (x, TREE_VALUE (exp), 0))
5795 && (TREE_CHAIN (exp) == 0
5796 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5797 else if (TREE_CODE (exp) == ERROR_MARK)
5798 return 1; /* An already-visited SAVE_EXPR? */
5803 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5807 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5808 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5812 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5813 the expression. If it is set, we conflict iff we are that rtx or
5814 both are in memory. Otherwise, we check all operands of the
5815 expression recursively. */
5817 switch (TREE_CODE (exp))
5820 return (staticp (TREE_OPERAND (exp, 0))
5821 || TREE_STATIC (exp)
5822 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5825 if (GET_CODE (x) == MEM
5826 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5827 get_alias_set (exp)))
5832 /* Assume that the call will clobber all hard registers and
5833 all of memory. */
5834 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5835 || GET_CODE (x) == MEM)
5840 /* If a sequence exists, we would have to scan every instruction
5841 in the sequence to see if it was safe. This is probably not
5842 worthwhile. */
5843 if (RTL_EXPR_SEQUENCE (exp))
5846 exp_rtl = RTL_EXPR_RTL (exp);
5849 case WITH_CLEANUP_EXPR:
5850 exp_rtl = RTL_EXPR_RTL (exp);
5853 case CLEANUP_POINT_EXPR:
5854 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5857 exp_rtl = SAVE_EXPR_RTL (exp);
5861 /* If we've already scanned this, don't do it again. Otherwise,
5862 show we've scanned it and record for clearing the flag if we're
5863 going on. */
5864 if (TREE_PRIVATE (exp))
5867 TREE_PRIVATE (exp) = 1;
5868 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5870 TREE_PRIVATE (exp) = 0;
5874 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5878 /* The only operand we look at is operand 1. The rest aren't
5879 part of the expression. */
5880 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5882 case METHOD_CALL_EXPR:
5883 /* This takes an rtx argument, but shouldn't appear here. */
5890 /* If we have an rtx, we do not need to scan our operands. */
5894 nops = first_rtl_op (TREE_CODE (exp));
5895 for (i = 0; i < nops; i++)
5896 if (TREE_OPERAND (exp, i) != 0
5897 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5900 /* If this is a language-specific tree code, it may require
5901 special handling. */
5902 if ((unsigned int) TREE_CODE (exp)
5903 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5905 && !(*lang_safe_from_p) (x, exp))
5909 /* If we have an rtl, find any enclosed object. Then see if we conflict
5910 with it. */
5913 if (GET_CODE (exp_rtl) == SUBREG)
5915 exp_rtl = SUBREG_REG (exp_rtl);
5916 if (GET_CODE (exp_rtl) == REG
5917 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5921 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5922 are memory and they conflict. */
5923 return ! (rtx_equal_p (x, exp_rtl)
5924 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5925 && true_dependence (exp_rtl, GET_MODE (x), x,
5926 rtx_addr_varies_p)));
5929 /* If we reach here, it is safe. */
5933 /* Subroutine of expand_expr: return nonzero iff EXP is an
5934 expression whose type is statically determinable. */
5940 if (TREE_CODE (exp) == PARM_DECL
5941 || TREE_CODE (exp) == VAR_DECL
5942 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5943 || TREE_CODE (exp) == COMPONENT_REF
5944 || TREE_CODE (exp) == ARRAY_REF)
5949 /* Subroutine of expand_expr: return rtx if EXP is a
5950 variable or parameter; else return 0. */
5957 switch (TREE_CODE (exp))
5961 return DECL_RTL (exp);
5967 #ifdef MAX_INTEGER_COMPUTATION_MODE
5970 check_max_integer_computation_mode (exp)
5973 enum tree_code code;
5974 enum machine_mode mode;
5976 /* Strip any NOPs that don't change the mode. */
5978 code = TREE_CODE (exp);
5980 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5981 if (code == NOP_EXPR
5982 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5985 /* First check the type of the overall operation. We need only look at
5986 unary, binary and relational operations. */
5987 if (TREE_CODE_CLASS (code) == '1'
5988 || TREE_CODE_CLASS (code) == '2'
5989 || TREE_CODE_CLASS (code) == '<')
5991 mode = TYPE_MODE (TREE_TYPE (exp));
5992 if (GET_MODE_CLASS (mode) == MODE_INT
5993 && mode > MAX_INTEGER_COMPUTATION_MODE)
5994 internal_error ("unsupported wide integer operation");
5997 /* Check operand of a unary op. */
5998 if (TREE_CODE_CLASS (code) == '1')
6000 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6001 if (GET_MODE_CLASS (mode) == MODE_INT
6002 && mode > MAX_INTEGER_COMPUTATION_MODE)
6003 internal_error ("unsupported wide integer operation");
6006 /* Check operands of a binary/comparison op. */
6007 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6009 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6010 if (GET_MODE_CLASS (mode) == MODE_INT
6011 && mode > MAX_INTEGER_COMPUTATION_MODE)
6012 internal_error ("unsupported wide integer operation");
6014 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6015 if (GET_MODE_CLASS (mode) == MODE_INT
6016 && mode > MAX_INTEGER_COMPUTATION_MODE)
6017 internal_error ("unsupported wide integer operation");
6022 /* expand_expr: generate code for computing expression EXP.
6023 An rtx for the computed value is returned. The value is never null.
6024 In the case of a void EXP, const0_rtx is returned.
6026 The value may be stored in TARGET if TARGET is nonzero.
6027 TARGET is just a suggestion; callers must assume that
6028 the rtx returned may not be the same as TARGET.
6030 If TARGET is CONST0_RTX, it means that the value will be ignored.
6032 If TMODE is not VOIDmode, it suggests generating the
6033 result in mode TMODE. But this is done only when convenient.
6034 Otherwise, TMODE is ignored and the value generated in its natural mode.
6035 TMODE is just a suggestion; callers must assume that
6036 the rtx returned may not have mode TMODE.
6038 Note that TARGET may have neither TMODE nor MODE. In that case, it
6039 probably will not be used.
6041 If MODIFIER is EXPAND_SUM then when EXP is an addition
6042 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6043 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6044 products as above, or REG or MEM, or constant.
6045 Ordinarily in such cases we would output mul or add instructions
6046 and then return a pseudo reg containing the sum.
6048 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6049 it also marks a label as absolutely required (it can't be dead).
6050 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6051 This is used for outputting expressions used in initializers.
6053 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6054 with a constant address even if that address is not normally legitimate.
6055 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6058 expand_expr (exp, target, tmode, modifier)
6061 enum machine_mode tmode;
6062 enum expand_modifier modifier;
6064 register rtx op0, op1, temp;
6065 tree type = TREE_TYPE (exp);
6066 int unsignedp = TREE_UNSIGNED (type);
6067 register enum machine_mode mode;
6068 register enum tree_code code = TREE_CODE (exp);
6070 rtx subtarget, original_target;
6073 /* Used by check-memory-usage to make modifier read only. */
6074 enum expand_modifier ro_modifier;
6076 /* Handle ERROR_MARK before anybody tries to access its type. */
6077 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6079 op0 = CONST0_RTX (tmode);
6085 mode = TYPE_MODE (type);
6086 /* Use subtarget as the target for operand 0 of a binary operation. */
6087 subtarget = get_subtarget (target);
6088 original_target = target;
6089 ignore = (target == const0_rtx
6090 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6091 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6092 || code == COND_EXPR)
6093 && TREE_CODE (type) == VOID_TYPE));
6095 /* Make a read-only version of the modifier. */
6096 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6097 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6098 ro_modifier = modifier;
6100 ro_modifier = EXPAND_NORMAL;
6102 /* If we are going to ignore this result, we need only do something
6103 if there is a side-effect somewhere in the expression. If there
6104 is, short-circuit the most common cases here. Note that we must
6105 not call expand_expr with anything but const0_rtx in case this
6106 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6110 if (! TREE_SIDE_EFFECTS (exp))
6113 /* Ensure we reference a volatile object even if value is ignored, but
6114 don't do this if all we are doing is taking its address. */
6115 if (TREE_THIS_VOLATILE (exp)
6116 && TREE_CODE (exp) != FUNCTION_DECL
6117 && mode != VOIDmode && mode != BLKmode
6118 && modifier != EXPAND_CONST_ADDRESS)
6120 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6121 if (GET_CODE (temp) == MEM)
6122 temp = copy_to_reg (temp);
6126 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6127 || code == INDIRECT_REF || code == BUFFER_REF)
6128 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6129 VOIDmode, ro_modifier);
6130 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6131 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6133 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6135 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6139 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6140 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6141 /* If the second operand has no side effects, just evaluate
6142 the first. */
6143 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6144 VOIDmode, ro_modifier);
6145 else if (code == BIT_FIELD_REF)
6147 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6149 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6151 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6159 #ifdef MAX_INTEGER_COMPUTATION_MODE
6160 /* Only check stuff here if the mode we want is different from the mode
6161 of the expression; if it's the same, check_max_integer_computation_mode
6162 will handle it. Do we really need to check this stuff at all? */
6165 && GET_MODE (target) != mode
6166 && TREE_CODE (exp) != INTEGER_CST
6167 && TREE_CODE (exp) != PARM_DECL
6168 && TREE_CODE (exp) != ARRAY_REF
6169 && TREE_CODE (exp) != ARRAY_RANGE_REF
6170 && TREE_CODE (exp) != COMPONENT_REF
6171 && TREE_CODE (exp) != BIT_FIELD_REF
6172 && TREE_CODE (exp) != INDIRECT_REF
6173 && TREE_CODE (exp) != CALL_EXPR
6174 && TREE_CODE (exp) != VAR_DECL
6175 && TREE_CODE (exp) != RTL_EXPR)
6177 enum machine_mode mode = GET_MODE (target);
6179 if (GET_MODE_CLASS (mode) == MODE_INT
6180 && mode > MAX_INTEGER_COMPUTATION_MODE)
6181 internal_error ("unsupported wide integer operation");
6185 && TREE_CODE (exp) != INTEGER_CST
6186 && TREE_CODE (exp) != PARM_DECL
6187 && TREE_CODE (exp) != ARRAY_REF
6188 && TREE_CODE (exp) != ARRAY_RANGE_REF
6189 && TREE_CODE (exp) != COMPONENT_REF
6190 && TREE_CODE (exp) != BIT_FIELD_REF
6191 && TREE_CODE (exp) != INDIRECT_REF
6192 && TREE_CODE (exp) != VAR_DECL
6193 && TREE_CODE (exp) != CALL_EXPR
6194 && TREE_CODE (exp) != RTL_EXPR
6195 && GET_MODE_CLASS (tmode) == MODE_INT
6196 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6197 internal_error ("unsupported wide integer operation");
6199 check_max_integer_computation_mode (exp);
6202 /* If we will do cse, generate all results into pseudo registers
6203 since 1) that allows cse to find more things
6204 and 2) otherwise cse could produce an insn the machine
6205 cannot support. */
6207 if (! cse_not_expected && mode != BLKmode && target
6208 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6215 tree function = decl_function_context (exp);
6216 /* Handle using a label in a containing function. */
6217 if (function != current_function_decl
6218 && function != inline_function_decl && function != 0)
6220 struct function *p = find_function_data (function);
6221 p->expr->x_forced_labels
6222 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6223 p->expr->x_forced_labels);
6227 if (modifier == EXPAND_INITIALIZER)
6228 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6233 temp = gen_rtx_MEM (FUNCTION_MODE,
6234 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6235 if (function != current_function_decl
6236 && function != inline_function_decl && function != 0)
6237 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6242 if (DECL_RTL (exp) == 0)
6244 error_with_decl (exp, "prior parameter's size depends on `%s'");
6245 return CONST0_RTX (mode);
6248 /* ... fall through ...  */
6250 case VAR_DECL:
6251 /* If a static var's type was incomplete when the decl was written,
6252 but the type is complete now, lay out the decl now. */
6253 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6254 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6256 layout_decl (exp, 0);
6257 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
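/* Illustrative example (added commentary, not original source): a
   tentative definition whose type is completed later,

       static int tab[];
       static int tab[3];

   reaches here with DECL_SIZE still zero, so the decl is laid out
   now that the complete type is known.  */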
6260 /* Although static-storage variables start off initialized, according to
6261 ANSI C, a memcpy could overwrite them with uninitialized values. So
6262 we check them too. This also lets us check for read-only variables
6263 accessed via a non-const declaration, in case it won't be detected
6264 any other way (e.g., in an embedded system or OS kernel without
6265 memory protection).
6267 Aggregates are not checked here; they're handled elsewhere. */
6268 if (cfun && current_function_check_memory_usage
6270 && GET_CODE (DECL_RTL (exp)) == MEM
6271 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6273 enum memory_use_mode memory_usage;
6274 memory_usage = get_memory_usage_from_modifier (modifier);
6276 in_check_memory_usage = 1;
6277 if (memory_usage != MEMORY_USE_DONT)
6278 emit_library_call (chkr_check_addr_libfunc,
6279 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6280 XEXP (DECL_RTL (exp), 0), Pmode,
6281 GEN_INT (int_size_in_bytes (type)),
6282 TYPE_MODE (sizetype),
6283 GEN_INT (memory_usage),
6284 TYPE_MODE (integer_type_node));
6285 in_check_memory_usage = 0;
6288 /* ... fall through ...  */
6290 case FUNCTION_DECL:
6291 case RESULT_DECL:
6292 if (DECL_RTL (exp) == 0)
6293 abort ();
6295 /* Ensure variable marked as used even if it doesn't go through
6296 a parser.  If it hasn't been used yet, write out an external
6297 definition.  */
6298 if (! TREE_USED (exp))
6300 assemble_external (exp);
6301 TREE_USED (exp) = 1;
6304 /* Show we haven't gotten RTL for this yet.  */
6305 temp = 0;
6307 /* Handle variables inherited from containing functions. */
6308 context = decl_function_context (exp);
6310 /* We treat inline_function_decl as an alias for the current function
6311 because that is the inline function whose vars, types, etc.
6312 are being merged into the current function.
6313 See expand_inline_function. */
6315 if (context != 0 && context != current_function_decl
6316 && context != inline_function_decl
6317 /* If var is static, we don't need a static chain to access it. */
6318 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6319 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6323 /* Mark as non-local and addressable. */
6324 DECL_NONLOCAL (exp) = 1;
6325 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6326 abort ();
6327 mark_addressable (exp);
6328 if (GET_CODE (DECL_RTL (exp)) != MEM)
6329 abort ();
6330 addr = XEXP (DECL_RTL (exp), 0);
6331 if (GET_CODE (addr) == MEM)
6332 addr
6333 = replace_equiv_address (addr,
6334 fix_lexical_addr (XEXP (addr, 0), exp));
6335 else
6336 addr = fix_lexical_addr (addr, exp);
6338 temp = replace_equiv_address (DECL_RTL (exp), addr);
6341 /* This is the case of an array whose size is to be determined
6342 from its initializer, while the initializer is still being parsed.
6343 See expand_decl.  */
6345 else if (GET_CODE (DECL_RTL (exp)) == MEM
6346 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6347 temp = validize_mem (DECL_RTL (exp));
6349 /* If DECL_RTL is memory, we are in the normal case and either
6350 the address is not valid or it is not a register and -fforce-addr
6351 is specified, get the address into a register. */
6353 else if (GET_CODE (DECL_RTL (exp)) == MEM
6354 && modifier != EXPAND_CONST_ADDRESS
6355 && modifier != EXPAND_SUM
6356 && modifier != EXPAND_INITIALIZER
6357 && (! memory_address_p (DECL_MODE (exp),
6358 XEXP (DECL_RTL (exp), 0))
6359 || (flag_force_addr
6360 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6361 temp = replace_equiv_address (DECL_RTL (exp),
6362 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6364 /* If we got something, return it. But first, set the alignment
6365 if the address is a register. */
6368 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6369 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6374 /* If the mode of DECL_RTL does not match that of the decl, it
6375 must be a promoted value. We return a SUBREG of the wanted mode,
6376 but mark it so that we know that it was already extended. */
6378 if (GET_CODE (DECL_RTL (exp)) == REG
6379 && GET_MODE (DECL_RTL (exp)) != mode)
6381 /* Get the signedness used for this variable. Ensure we get the
6382 same mode we got when the variable was declared. */
6383 if (GET_MODE (DECL_RTL (exp))
6384 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6385 abort ();
6387 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6388 SUBREG_PROMOTED_VAR_P (temp) = 1;
6389 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
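/* Illustrative sketch (added commentary, not original source): on a
   target that promotes narrow locals to word mode, a `short' variable
   may live in an SImode pseudo; the code above hands back
   (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set so users
   know the value was already extended with the recorded signedness.  */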
6393 return DECL_RTL (exp);
6395 case INTEGER_CST:
6396 return immed_double_const (TREE_INT_CST_LOW (exp),
6397 TREE_INT_CST_HIGH (exp), mode);
6399 case CONST_DECL:
6400 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6401 EXPAND_MEMORY_USE_BAD);
6403 case REAL_CST:
6404 /* If optimized, generate immediate CONST_DOUBLE
6405 which will be turned into memory by reload if necessary.
6407 We used to force a register so that loop.c could see it. But
6408 this does not allow gen_* patterns to perform optimizations with
6409 the constants. It also produces two insns in cases like "x = 1.0;".
6410 On most machines, floating-point constants are not permitted in
6411 many insns, so we'd end up copying it to a register in any case.
6413 Now, we do the copying in expand_binop, if appropriate. */
6414 return immed_real_const (exp);
6416 case COMPLEX_CST:
6417 case STRING_CST:
6418 if (! TREE_CST_RTL (exp))
6419 output_constant_def (exp, 1);
6421 /* TREE_CST_RTL probably contains a constant address.
6422 On RISC machines where a constant address isn't valid,
6423 make some insns to get that address into a register. */
6424 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6425 && modifier != EXPAND_CONST_ADDRESS
6426 && modifier != EXPAND_INITIALIZER
6427 && modifier != EXPAND_SUM
6428 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6429 || (flag_force_addr
6430 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6431 return replace_equiv_address (TREE_CST_RTL (exp),
6432 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6433 return TREE_CST_RTL (exp);
6435 case EXPR_WITH_FILE_LOCATION:
6436 {
6437 rtx to_return;
6438 const char *saved_input_filename = input_filename;
6439 int saved_lineno = lineno;
6440 input_filename = EXPR_WFL_FILENAME (exp);
6441 lineno = EXPR_WFL_LINENO (exp);
6442 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6443 emit_line_note (input_filename, lineno);
6444 /* Possibly avoid switching back and forth here.  */
6445 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6446 input_filename = saved_input_filename;
6447 lineno = saved_lineno;
6448 return to_return;
6449 }
6451 case SAVE_EXPR:
6452 context = decl_function_context (exp);
6454 /* If this SAVE_EXPR was at global context, assume we are an
6455 initialization function and move it into our context. */
6456 if (context == 0)
6457 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6459 /* We treat inline_function_decl as an alias for the current function
6460 because that is the inline function whose vars, types, etc.
6461 are being merged into the current function.
6462 See expand_inline_function. */
6463 if (context == current_function_decl || context == inline_function_decl)
6464 context = 0;
6466 /* If this is non-local, handle it.  */
6467 if (context)
6468 {
6469 /* The following call just exists to abort if the context is
6470 not of a containing function. */
6471 find_function_data (context);
6473 temp = SAVE_EXPR_RTL (exp);
6474 if (temp && GET_CODE (temp) == REG)
6476 put_var_into_stack (exp);
6477 temp = SAVE_EXPR_RTL (exp);
6479 if (temp == 0 || GET_CODE (temp) != MEM)
6480 abort ();
6481 SAVE_EXPR_RTL (exp)
6482 = replace_equiv_address (temp,
6483 fix_lexical_addr (XEXP (temp, 0), exp));
6485 if (SAVE_EXPR_RTL (exp) == 0)
6486 {
6487 if (mode == VOIDmode)
6488 temp = const0_rtx;
6489 else
6490 temp = assign_temp (build_qualified_type (type,
6491 (TYPE_QUALS (type)
6492 | TYPE_QUAL_CONST)),
6493 3, 0, 0);
6495 SAVE_EXPR_RTL (exp) = temp;
6496 if (!optimize && GET_CODE (temp) == REG)
6497 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6498 save_expr_regs);
6500 /* If the mode of TEMP does not match that of the expression, it
6501 must be a promoted value. We pass store_expr a SUBREG of the
6502 wanted mode but mark it so that we know that it was already
6503 extended.  Note that `unsignedp' was modified above in
6504 this case.  */
6506 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6508 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6509 SUBREG_PROMOTED_VAR_P (temp) = 1;
6510 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6513 if (temp == const0_rtx)
6514 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6515 EXPAND_MEMORY_USE_BAD);
6516 else
6517 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6519 TREE_USED (exp) = 1;
6522 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6523 must be a promoted value. We return a SUBREG of the wanted mode,
6524 but mark it so that we know that it was already extended. */
6526 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6527 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6529 /* Compute the signedness and make the proper SUBREG. */
6530 promote_mode (type, mode, &unsignedp, 0);
6531 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6532 SUBREG_PROMOTED_VAR_P (temp) = 1;
6533 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6537 return SAVE_EXPR_RTL (exp);
6539 case UNSAVE_EXPR:
6540 {
6541 rtx temp;
6542 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6543 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6544 return temp;
6545 }
6547 case PLACEHOLDER_EXPR:
6549 tree placeholder_expr;
6551 /* If there is an object on the head of the placeholder list,
6552 see if some object in it is of type TYPE or a pointer to it.  For
6553 further information, see tree.def. */
6554 for (placeholder_expr = placeholder_list;
6555 placeholder_expr != 0;
6556 placeholder_expr = TREE_CHAIN (placeholder_expr))
6558 tree need_type = TYPE_MAIN_VARIANT (type);
6560 tree old_list = placeholder_list;
6563 /* Find the outermost reference that is of the type we want.
6564 If none, see if any object has a type that is a pointer to
6565 the type we want. */
6566 for (elt = TREE_PURPOSE (placeholder_expr);
6567 elt != 0 && object == 0;
6568 elt
6569 = ((TREE_CODE (elt) == COMPOUND_EXPR
6570 || TREE_CODE (elt) == COND_EXPR)
6571 ? TREE_OPERAND (elt, 1)
6572 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6573 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6574 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6575 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6576 ? TREE_OPERAND (elt, 0) : 0))
6577 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6578 object = elt;
6580 for (elt = TREE_PURPOSE (placeholder_expr);
6581 elt != 0 && object == 0;
6582 elt
6583 = ((TREE_CODE (elt) == COMPOUND_EXPR
6584 || TREE_CODE (elt) == COND_EXPR)
6585 ? TREE_OPERAND (elt, 1)
6586 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6587 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6589 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6590 ? TREE_OPERAND (elt, 0) : 0))
6591 if (POINTER_TYPE_P (TREE_TYPE (elt))
6592 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6593 == need_type))
6594 object = build1 (INDIRECT_REF, need_type, elt);
6598 /* Expand this object skipping the list entries before
6599 it was found in case it is also a PLACEHOLDER_EXPR.
6600 In that case, we want to translate it using subsequent
6601 entries.  */
6602 placeholder_list = TREE_CHAIN (placeholder_expr);
6603 temp = expand_expr (object, original_target, tmode,
6604 ro_modifier);
6605 placeholder_list = old_list;
6606 return temp;
6607 }
6608 }
6611 /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6612 abort ();
6614 case WITH_RECORD_EXPR:
6615 /* Put the object on the placeholder list, expand our first operand,
6616 and pop the list. */
6617 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6618 placeholder_list);
6619 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6620 tmode, ro_modifier);
6621 placeholder_list = TREE_CHAIN (placeholder_list);
6622 return target;
6624 case GOTO_EXPR:
6625 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6626 expand_goto (TREE_OPERAND (exp, 0));
6627 else
6628 expand_computed_goto (TREE_OPERAND (exp, 0));
6629 return const0_rtx;
6631 case EXIT_EXPR:
6632 expand_exit_loop_if_false (NULL,
6633 invert_truthvalue (TREE_OPERAND (exp, 0)));
6634 return const0_rtx;
6636 case LABELED_BLOCK_EXPR:
6637 if (LABELED_BLOCK_BODY (exp))
6638 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6639 /* Should perhaps use expand_label, but this is simpler and safer. */
6640 do_pending_stack_adjust ();
6641 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6642 return const0_rtx;
6644 case EXIT_BLOCK_EXPR:
6645 if (EXIT_BLOCK_RETURN (exp))
6646 sorry ("returned value in block_exit_expr");
6647 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6648 return const0_rtx;
6650 case LOOP_EXPR:
6651 push_temp_slots ();
6652 expand_start_loop (1);
6653 expand_expr_stmt (TREE_OPERAND (exp, 0));
6654 expand_end_loop ();
6655 pop_temp_slots ();
6657 return const0_rtx;
6659 case BIND_EXPR:
6660 {
6661 tree vars = TREE_OPERAND (exp, 0);
6662 int vars_need_expansion = 0;
6664 /* Need to open a binding contour here because
6665 if there are any cleanups they must be contained here. */
6666 expand_start_bindings (2);
6668 /* Mark the corresponding BLOCK for output in its proper place. */
6669 if (TREE_OPERAND (exp, 2) != 0
6670 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6671 insert_block (TREE_OPERAND (exp, 2));
6673 /* If VARS have not yet been expanded, expand them now. */
6674 while (vars != 0)
6675 {
6676 if (!DECL_RTL_SET_P (vars))
6677 {
6678 vars_need_expansion = 1;
6679 expand_decl (vars);
6680 }
6681 expand_decl_init (vars);
6682 vars = TREE_CHAIN (vars);
6685 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6687 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6689 return temp;
6690 }
6692 case RTL_EXPR:
6693 if (RTL_EXPR_SEQUENCE (exp))
6694 {
6695 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6696 abort ();
6697 emit_insns (RTL_EXPR_SEQUENCE (exp));
6698 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6699 }
6700 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6701 free_temps_for_rtl_expr (exp);
6702 return RTL_EXPR_RTL (exp);
6704 case CONSTRUCTOR:
6705 /* If we don't need the result, just ensure we evaluate any
6706 subexpressions.  */
6707 if (ignore)
6708 {
6709 tree elt;
6710 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6711 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6712 EXPAND_MEMORY_USE_BAD);
6713 return const0_rtx;
6714 }
6716 /* All elts simple constants => refer to a constant in memory. But
6717 if this is a non-BLKmode mode, let it store a field at a time
6718 since that should make a CONST_INT or CONST_DOUBLE when we
6719 fold. Likewise, if we have a target we can use, it is best to
6720 store directly into the target unless the type is large enough
6721 that memcpy will be used. If we are making an initializer and
6722 all operands are constant, put it in memory as well. */
6723 else if ((TREE_STATIC (exp)
6724 && ((mode == BLKmode
6725 && ! (target != 0 && safe_from_p (target, exp, 1)))
6726 || TREE_ADDRESSABLE (exp)
6727 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6728 && (! MOVE_BY_PIECES_P
6729 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6731 && ! mostly_zeros_p (exp))))
6732 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6734 rtx constructor = output_constant_def (exp, 1);
6736 if (modifier != EXPAND_CONST_ADDRESS
6737 && modifier != EXPAND_INITIALIZER
6738 && modifier != EXPAND_SUM)
6739 constructor = validize_mem (constructor);
6740 return constructor;
6741 }
6743 else
6744 {
6745 /* Handle calls that pass values in multiple non-contiguous
6746 locations. The Irix 6 ABI has examples of this. */
6747 if (target == 0 || ! safe_from_p (target, exp, 1)
6748 || GET_CODE (target) == PARALLEL)
6749 target
6750 = assign_temp (build_qualified_type (type,
6751 (TYPE_QUALS (type)
6752 | (TREE_READONLY (exp)
6753 * TYPE_QUAL_CONST))),
6754 TREE_ADDRESSABLE (exp), 1, 1);
6756 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6757 int_size_in_bytes (TREE_TYPE (exp)));
6758 return target;
6759 }
6761 case INDIRECT_REF:
6762 {
6763 tree exp1 = TREE_OPERAND (exp, 0);
6764 tree index;
6765 tree string = string_constant (exp1, &index);
6767 /* Try to optimize reads from const strings. */
6768 if (string
6769 && TREE_CODE (string) == STRING_CST
6770 && TREE_CODE (index) == INTEGER_CST
6771 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6772 && GET_MODE_CLASS (mode) == MODE_INT
6773 && GET_MODE_SIZE (mode) == 1
6774 && modifier != EXPAND_MEMORY_USE_WO)
6775 return
6776 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6778 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6779 op0 = memory_address (mode, op0);
6781 if (cfun && current_function_check_memory_usage
6782 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6784 enum memory_use_mode memory_usage;
6785 memory_usage = get_memory_usage_from_modifier (modifier);
6787 if (memory_usage != MEMORY_USE_DONT)
6789 in_check_memory_usage = 1;
6790 emit_library_call (chkr_check_addr_libfunc,
6791 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6792 Pmode, GEN_INT (int_size_in_bytes (type)),
6793 TYPE_MODE (sizetype),
6794 GEN_INT (memory_usage),
6795 TYPE_MODE (integer_type_node));
6796 in_check_memory_usage = 0;
6800 temp = gen_rtx_MEM (mode, op0);
6801 set_mem_attributes (temp, exp, 0);
6803 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6804 here, because, in C and C++, the fact that a location is accessed
6805 through a pointer to const does not mean that the value there can
6806 never change. Languages where it can never change should
6807 also set TREE_STATIC. */
6808 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6810 /* If we are writing to this object and its type is a record with
6811 readonly fields, we must mark it as readonly so it will
6812 conflict with readonly references to those fields. */
6813 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6814 RTX_UNCHANGING_P (temp) = 1;
6816 return temp;
6817 }
6819 case ARRAY_REF:
6820 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6821 abort ();
6823 {
6824 tree array = TREE_OPERAND (exp, 0);
6825 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6826 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6827 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6830 /* Optimize the special-case of a zero lower bound.
6832 We convert the low_bound to sizetype to avoid some problems
6833 with constant folding. (E.g. suppose the lower bound is 1,
6834 and its mode is QI. Without the conversion, (ARRAY
6835 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6836 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6838 if (! integer_zerop (low_bound))
6839 index = size_diffop (index, convert (sizetype, low_bound));
6841 /* Fold an expression like: "foo"[2].
6842 This is not done in fold so it won't happen inside &.
6843 Don't fold if this is for wide characters since it's too
6844 difficult to do correctly and this is a very rare case. */
6846 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6847 && TREE_CODE (array) == STRING_CST
6848 && TREE_CODE (index) == INTEGER_CST
6849 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6850 && GET_MODE_CLASS (mode) == MODE_INT
6851 && GET_MODE_SIZE (mode) == 1)
6852 return
6853 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6855 /* If this is a constant index into a constant array,
6856 just get the value from the array. Handle both the cases when
6857 we have an explicit constructor and when our operand is a variable
6858 that was declared const. */
6860 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6861 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6862 && TREE_CODE (index) == INTEGER_CST
6863 && 0 > compare_tree_int (index,
6864 list_length (CONSTRUCTOR_ELTS
6865 (TREE_OPERAND (exp, 0)))))
6869 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6870 i = TREE_INT_CST_LOW (index);
6871 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6872 ;
6874 if (elem)
6875 return expand_expr (fold (TREE_VALUE (elem)), target,
6876 tmode, ro_modifier);
6879 else if (optimize >= 1
6880 && modifier != EXPAND_CONST_ADDRESS
6881 && modifier != EXPAND_INITIALIZER
6882 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6883 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6884 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6886 if (TREE_CODE (index) == INTEGER_CST)
6888 tree init = DECL_INITIAL (array);
6890 if (TREE_CODE (init) == CONSTRUCTOR)
6894 for (elem = CONSTRUCTOR_ELTS (init);
6895 (elem
6896 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6897 elem = TREE_CHAIN (elem))
6898 ;
6900 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6901 return expand_expr (fold (TREE_VALUE (elem)), target,
6902 tmode, ro_modifier);
6904 else if (TREE_CODE (init) == STRING_CST
6905 && 0 > compare_tree_int (index,
6906 TREE_STRING_LENGTH (init)))
6908 tree type = TREE_TYPE (TREE_TYPE (init));
6909 enum machine_mode mode = TYPE_MODE (type);
6911 if (GET_MODE_CLASS (mode) == MODE_INT
6912 && GET_MODE_SIZE (mode) == 1)
6913 return (GEN_INT
6914 (TREE_STRING_POINTER
6915 (init)[TREE_INT_CST_LOW (index)]));
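/* Illustrative example (added commentary, not original source): for

       static const char msg[] = "hi";
       ... msg[1] ...

   the constant index into the STRING_CST initializer folds at expand
   time to GEN_INT ('i') instead of emitting a memory load.  */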
6924 case ARRAY_RANGE_REF:
6925 /* If the operand is a CONSTRUCTOR, we can just extract the
6926 appropriate field if it is present. Don't do this if we have
6927 already written the data since we want to refer to that copy
6928 and varasm.c assumes that's what we'll do. */
6929 if (code == COMPONENT_REF
6930 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6931 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6935 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6936 elt = TREE_CHAIN (elt))
6937 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6938 /* We can normally use the value of the field in the
6939 CONSTRUCTOR. However, if this is a bitfield in
6940 an integral mode that we can fit in a HOST_WIDE_INT,
6941 we must mask only the number of bits in the bitfield,
6942 since this is done implicitly by the constructor. If
6943 the bitfield does not meet either of those conditions,
6944 we can't do this optimization. */
6945 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6946 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6947 == MODE_INT)
6948 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6949 <= HOST_BITS_PER_WIDE_INT))))
6951 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6952 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6954 HOST_WIDE_INT bitsize
6955 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6957 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6959 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6960 op0 = expand_and (op0, op1, target);
6961 }
6962 else
6963 {
6964 enum machine_mode imode
6965 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6966 tree count
6967 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6968 0);
6970 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6971 target, 0);
6972 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6973 target, 0);
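/* Illustrative example (added commentary, not original source): for

       struct { int f : 3; } x = { v };

   reading x.f out of the CONSTRUCTOR masks with (1 << 3) - 1 when the
   field is unsigned; for a signed field the shifts above move the bits
   to the top of imode and back down so the value is sign-extended.  */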
6982 enum machine_mode mode1;
6983 HOST_WIDE_INT bitsize, bitpos;
6984 tree offset;
6985 int volatilep = 0;
6986 unsigned int alignment;
6987 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6988 &mode1, &unsignedp, &volatilep,
6989 &alignment);
6991 /* If we got back the original object, something is wrong. Perhaps
6992 we are evaluating an expression too early. In any event, don't
6993 infinitely recurse.  */
6994 if (tem == exp)
6995 abort ();
6997 /* If TEM's type is a union of variable size, pass TARGET to the inner
6998 computation, since it will need a temporary and TARGET is known
6999 to suffice.  This occurs in unchecked conversion in Ada.  */
7001 op0 = expand_expr (tem,
7002 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7003 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7005 ? target : NULL_RTX),
7006 VOIDmode,
7007 (modifier == EXPAND_INITIALIZER
7008 || modifier == EXPAND_CONST_ADDRESS)
7009 ? modifier : EXPAND_NORMAL);
7011 /* If this is a constant, put it into a register if it is a
7012 legitimate constant and OFFSET is 0 and memory if it isn't. */
7013 if (CONSTANT_P (op0))
7015 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7016 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7017 && offset == 0)
7018 op0 = force_reg (mode, op0);
7019 else
7020 op0 = validize_mem (force_const_mem (mode, op0));
7021 }
7023 if (offset != 0)
7024 {
7025 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7027 /* If this object is in a register, put it into memory.
7028 This case can't occur in C, but can in Ada if we have
7029 unchecked conversion of an expression from a scalar type to
7030 an array or record type. */
7031 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7032 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7034 /* If the operand is a SAVE_EXPR, we can deal with this by
7035 forcing the SAVE_EXPR into memory. */
7036 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7038 put_var_into_stack (TREE_OPERAND (exp, 0));
7039 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7044 = build_qualified_type (TREE_TYPE (tem),
7045 (TYPE_QUALS (TREE_TYPE (tem))
7046 | TYPE_QUAL_CONST));
7047 rtx memloc = assign_temp (nt, 1, 1, 1);
7049 mark_temp_addr_taken (memloc);
7050 emit_move_insn (memloc, op0);
7051 op0 = memloc;
7052 }
7053 }
7055 if (GET_CODE (op0) != MEM)
7056 abort ();
7058 if (GET_MODE (offset_rtx) != ptr_mode)
7060 #ifdef POINTERS_EXTEND_UNSIGNED
7061 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7063 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7067 /* A constant address in OP0 can have VOIDmode, we must not try
7068 to call force_reg for that case. Avoid that case. */
7069 if (GET_CODE (op0) == MEM
7070 && GET_MODE (op0) == BLKmode
7071 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7072 && bitsize != 0
7073 && (bitpos % bitsize) == 0
7074 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7075 && alignment == GET_MODE_ALIGNMENT (mode1))
7077 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7079 if (GET_CODE (XEXP (temp, 0)) == REG)
7080 op0 = temp;
7081 else
7082 op0 = (replace_equiv_address
7084 force_reg (GET_MODE (XEXP (temp, 0)),
7089 op0 = change_address (op0, VOIDmode,
7090 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7091 force_reg (ptr_mode,
7095 /* Don't forget about volatility even if this is a bitfield. */
7096 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7098 op0 = copy_rtx (op0);
7099 MEM_VOLATILE_P (op0) = 1;
7102 /* Check the access. */
7103 if (cfun != 0 && current_function_check_memory_usage
7104 && GET_CODE (op0) == MEM)
7106 enum memory_use_mode memory_usage;
7107 memory_usage = get_memory_usage_from_modifier (modifier);
7109 if (memory_usage != MEMORY_USE_DONT)
7114 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7115 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7117 /* Check the access right of the pointer. */
7118 in_check_memory_usage = 1;
7119 if (size > BITS_PER_UNIT)
7120 emit_library_call (chkr_check_addr_libfunc,
7121 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7122 Pmode, GEN_INT (size / BITS_PER_UNIT),
7123 TYPE_MODE (sizetype),
7124 GEN_INT (memory_usage),
7125 TYPE_MODE (integer_type_node));
7126 in_check_memory_usage = 0;
7130 /* In cases where an aligned union has an unaligned object
7131 as a field, we might be extracting a BLKmode value from
7132 an integer-mode (e.g., SImode) object. Handle this case
7133 by doing the extract into an object as wide as the field
7134 (which we know to be the width of a basic mode), then
7135 storing into memory, and changing the mode to BLKmode. */
7136 if (mode1 == VOIDmode
7137 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7138 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7139 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7140 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7141 && modifier != EXPAND_CONST_ADDRESS
7142 && modifier != EXPAND_INITIALIZER)
7143 /* If the field isn't aligned enough to fetch as a memref,
7144 fetch it as a bit field. */
7145 || (mode1 != BLKmode
7146 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7147 && ((TYPE_ALIGN (TREE_TYPE (tem))
7148 < GET_MODE_ALIGNMENT (mode))
7149 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7150 /* If the type and the field are a constant size and the
7151 size of the type isn't the same size as the bitfield,
7152 we must use bitfield operations. */
7154 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7156 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7159 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7160 && (TYPE_ALIGN (type) > alignment
7161 || bitpos % TYPE_ALIGN (type) != 0)))
7163 enum machine_mode ext_mode = mode;
7165 if (ext_mode == BLKmode
7166 && ! (target != 0 && GET_CODE (op0) == MEM
7167 && GET_CODE (target) == MEM
7168 && bitpos % BITS_PER_UNIT == 0))
7169 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7171 if (ext_mode == BLKmode)
7173 /* In this case, BITPOS must start at a byte boundary and
7174 TARGET, if specified, must be a MEM. */
7175 if (GET_CODE (op0) != MEM
7176 || (target != 0 && GET_CODE (target) != MEM)
7177 || bitpos % BITS_PER_UNIT != 0)
7178 abort ();
7180 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7181 if (target == 0)
7182 target = assign_temp (type, 0, 1, 1);
7184 emit_block_move (target, op0,
7185 bitsize == -1 ? expr_size (exp)
7186 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7193 op0 = validize_mem (op0);
7195 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7196 mark_reg_pointer (XEXP (op0, 0), alignment);
7198 op0 = extract_bit_field (op0, bitsize, bitpos,
7199 unsignedp, target, ext_mode, ext_mode,
7200 alignment,
7201 int_size_in_bytes (TREE_TYPE (tem)));
7203 /* If the result is a record type and BITSIZE is narrower than
7204 the mode of OP0, an integral mode, and this is a big endian
7205 machine, we must put the field into the high-order bits. */
7206 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7207 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7208 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7209 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7210 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7211 - bitsize),
7212 op0, 1);
7214 if (mode == BLKmode)
7216 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7217 TYPE_QUAL_CONST);
7218 rtx new = assign_temp (nt, 0, 1, 1);
7220 emit_move_insn (new, op0);
7221 op0 = copy_rtx (new);
7222 PUT_MODE (op0, BLKmode);
7223 set_mem_attributes (op0, exp, 1);
7224 }
7226 return op0;
7227 }
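/* Illustrative sketch (added commentary, not original source): think
   of an SImode-aligned union containing a 3-byte BLKmode member; the
   bits are fetched above as one integer-mode value, spilled to a
   temporary, and the temporary is then relabeled BLKmode so callers
   see an ordinary memory reference.  */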
7228 /* If the result is BLKmode, use that to access the object
7229 now as well.  */
7230 if (mode == BLKmode)
7231 mode1 = BLKmode;
7233 /* Get a reference to just this component. */
7234 if (modifier == EXPAND_CONST_ADDRESS
7235 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7236 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7237 else
7238 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7240 set_mem_attributes (op0, exp, 0);
7241 if (GET_CODE (XEXP (op0, 0)) == REG)
7242 mark_reg_pointer (XEXP (op0, 0), alignment);
7244 MEM_VOLATILE_P (op0) |= volatilep;
7245 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7246 || modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_INITIALIZER)
7248 return op0;
7249 else if (target == 0)
7250 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7252 convert_move (target, op0, unsignedp);
7253 return target;
7254 }
7256 /* Intended for a reference to a buffer of a file-object in Pascal.
7257 But it's not certain that a special tree code will really be
7258 necessary for these.  INDIRECT_REF might work for them.  */
7259 case BUFFER_REF:
7260 abort ();
7262 case IN_EXPR:
7263 {
7264 /* Pascal set IN expression.
7267 rlo = set_low - (set_low%bits_per_word);
7268 the_word = set [ (index - rlo)/bits_per_word ];
7269 bit_index = index % bits_per_word;
7270 bitmask = 1 << bit_index;
7271 return !!(the_word & bitmask); */
7273 tree set = TREE_OPERAND (exp, 0);
7274 tree index = TREE_OPERAND (exp, 1);
7275 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7276 tree set_type = TREE_TYPE (set);
7277 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7278 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7279 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7280 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7281 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7282 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7283 rtx setaddr = XEXP (setval, 0);
7284 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7285 rtx rlow;
7286 rtx diff, quo, rem, addr, bit, result;
7288 /* If domain is empty, answer is no. Likewise if index is constant
7289 and out of bounds. */
7290 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7291 && TREE_CODE (set_low_bound) == INTEGER_CST
7292 && tree_int_cst_lt (set_high_bound, set_low_bound))
7293 || (TREE_CODE (index) == INTEGER_CST
7294 && TREE_CODE (set_low_bound) == INTEGER_CST
7295 && tree_int_cst_lt (index, set_low_bound))
7296 || (TREE_CODE (set_high_bound) == INTEGER_CST
7297 && TREE_CODE (index) == INTEGER_CST
7298 && tree_int_cst_lt (set_high_bound, index))))
7299 return const0_rtx;
7301 if (target == 0)
7302 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7304 /* If we get here, we have to generate the code for both cases
7305 (in range and out of range). */
7307 op0 = gen_label_rtx ();
7308 op1 = gen_label_rtx ();
7310 if (! (GET_CODE (index_val) == CONST_INT
7311 && GET_CODE (lo_r) == CONST_INT))
7313 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7314 GET_MODE (index_val), iunsignedp, 0, op1);
7317 if (! (GET_CODE (index_val) == CONST_INT
7318 && GET_CODE (hi_r) == CONST_INT))
7320 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7321 GET_MODE (index_val), iunsignedp, 0, op1);
7324 /* Calculate the element number of bit zero in the first word
7325 of the set.  */
7326 if (GET_CODE (lo_r) == CONST_INT)
7327 rlow = GEN_INT (INTVAL (lo_r)
7328 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7329 else
7330 rlow = expand_binop (index_mode, and_optab, lo_r,
7331 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7332 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7334 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7335 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7337 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7338 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7339 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7340 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7342 addr = memory_address (byte_mode,
7343 expand_binop (index_mode, add_optab, diff,
7344 setaddr, NULL_RTX, iunsignedp,
7347 /* Extract the bit we want to examine. */
7348 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7349 gen_rtx_MEM (byte_mode, addr),
7350 make_tree (TREE_TYPE (index), rem),
7351 NULL_RTX, 1);
7352 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7353 GET_MODE (target) == byte_mode ? target : 0,
7354 1, OPTAB_LIB_WIDEN);
7356 if (result != target)
7357 convert_move (target, result, 1);
7359 /* Output the code to handle the out-of-range case. */
7360 emit_jump (op0);
7361 emit_label (op1);
7362 emit_move_insn (target, const0_rtx);
7363 emit_label (op0);
7365 return target;
7366 }
7367 case WITH_CLEANUP_EXPR:
7368 if (RTL_EXPR_RTL (exp) == 0)
7369 {
7370 RTL_EXPR_RTL (exp)
7371 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7372 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7374 /* That's it for this cleanup. */
7375 TREE_OPERAND (exp, 2) = 0;
7376 }
7377 return RTL_EXPR_RTL (exp);
7379 case CLEANUP_POINT_EXPR:
7381 /* Start a new binding layer that will keep track of all cleanup
7382 actions to be performed. */
7383 expand_start_bindings (2);
7385 target_temp_slot_level = temp_slot_level;
7387 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7388 /* If we're going to use this value, load it up now. */
7389 if (! ignore)
7390 op0 = force_not_mem (op0);
7391 preserve_temp_slots (op0);
7392 expand_end_bindings (NULL_TREE, 0, 0);
7393 return op0;
7394 }
7396 case CALL_EXPR:
7397 /* Check for a built-in function. */
7398 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7399 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7400 == FUNCTION_DECL)
7401 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7403 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7404 == BUILT_IN_FRONTEND)
7405 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7406 else
7407 return expand_builtin (exp, target, subtarget, tmode, ignore);
7408 }
7410 return expand_call (exp, target, ignore);
7412 case NON_LVALUE_EXPR:
7413 case NOP_EXPR:
7414 case CONVERT_EXPR:
7415 case REFERENCE_EXPR:
7416 if (TREE_OPERAND (exp, 0) == error_mark_node)
7417 return const0_rtx;
7419 if (TREE_CODE (type) == UNION_TYPE)
7421 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7423 /* If both input and output are BLKmode, this conversion
7424 isn't actually doing anything unless we need to make the
7425 alignment stricter. */
7426 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7427 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7428 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7429 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7430 ro_modifier);
7432 if (target == 0)
7433 target = assign_temp (type, 0, 1, 1);
7435 if (GET_CODE (target) == MEM)
7436 /* Store data into beginning of memory target. */
7437 store_expr (TREE_OPERAND (exp, 0),
7438 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7440 else if (GET_CODE (target) == REG)
7441 /* Store this field into a union of the proper type. */
7442 store_field (target,
7443 MIN ((int_size_in_bytes (TREE_TYPE
7444 (TREE_OPERAND (exp, 0)))
7445 * BITS_PER_UNIT),
7446 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7447 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7448 VOIDmode, 0, BITS_PER_UNIT,
7449 int_size_in_bytes (type), 0);
7451 else
7452 abort ();
7453 /* Return the entire union.  */
7454 return target;
7455 }
7457 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7459 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7462 /* If the signedness of the conversion differs and OP0 is
7463 a promoted SUBREG, clear that indication since we now
7464 have to do the proper extension. */
7465 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7466 && GET_CODE (op0) == SUBREG)
7467 SUBREG_PROMOTED_VAR_P (op0) = 0;
7469 return op0;
7470 }
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7473 if (GET_MODE (op0) == mode)
7474 return op0;
7476 /* If OP0 is a constant, just convert it into the proper mode. */
7477 if (CONSTANT_P (op0))
7478 return
7479 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7480 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7482 if (modifier == EXPAND_INITIALIZER)
7483 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7485 if (target == 0)
7486 return
7487 convert_to_mode (mode, op0,
7488 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7489 else
7490 convert_move (target, op0,
7491 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7492 return target;
7494 case PLUS_EXPR:
7495 /* We come here from MINUS_EXPR when the second operand is a
7496 constant.  */
7497 plus_expr:
7498 this_optab = ! unsignedp && flag_trapv
7499 && (GET_MODE_CLASS(mode) == MODE_INT)
7500 ? addv_optab : add_optab;
7502 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7503 something else, make sure we add the register to the constant and
7504 then to the other thing. This case can occur during strength
7505 reduction and doing it this way will produce better code if the
7506 frame pointer or argument pointer is eliminated.
7508 fold-const.c will ensure that the constant is always in the inner
7509 PLUS_EXPR, so the only case we need to do anything about is if
7510 sp, ap, or fp is our second argument, in which case we must swap
7511 the innermost first argument and our second argument. */
7513 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7514 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7515 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7516 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7517 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7518 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7520 tree t = TREE_OPERAND (exp, 1);
7522 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7523 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
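/* Illustrative example (added commentary, not original source): a tree
   of the shape (PLUS (PLUS x 4) fp) is rearranged by the swap above
   into (PLUS (PLUS fp 4) x), so the constant stays next to the frame
   pointer and folds into the displacement when fp is eliminated.  */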
7526 /* If the result is to be ptr_mode and we are adding an integer to
7527 something, we might be forming a constant. So try to use
7528 plus_constant. If it produces a sum and we can't accept it,
7529 use force_operand. This allows P = &ARR[const] to generate
7530 efficient code on machines where a SYMBOL_REF is not a valid
7533 If this is an EXPAND_SUM call, always return the sum. */
7534 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7535 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7537 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7538 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7539 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7540 {
7541 rtx constant_part;
7543 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7544 EXPAND_SUM);
7545 /* Use immed_double_const to ensure that the constant is
7546 truncated according to the mode of OP1, then sign extended
7547 to a HOST_WIDE_INT. Using the constant directly can result
7548 in non-canonical RTL in a 64x32 cross compile. */
7549 constant_part
7550 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7551 (HOST_WIDE_INT) 0,
7552 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7553 op1 = plus_constant (op1, INTVAL (constant_part));
7554 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7555 op1 = force_operand (op1, target);
7556 return op1;
7557 }
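/* Illustrative example (added commentary, not original source): in a
   64-bit-host, 32-bit-target cross compile, an SImode -1 read as
   0xffffffff must be re-canonicalized to HOST_WIDE_INT -1; the
   immed_double_const call above performs exactly that truncation and
   sign extension before plus_constant sees the value.  */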
7559 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7560 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7561 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7562 {
7563 rtx constant_part;
7565 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7566 EXPAND_SUM);
7567 if (! CONSTANT_P (op0))
7569 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7570 VOIDmode, modifier);
7571 /* Don't go to both_summands if modifier
7572 says it's not right to return a PLUS. */
7573 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7574 goto binop2;
7575 goto both_summands;
7576 }
7577 /* Use immed_double_const to ensure that the constant is
7578 truncated according to the mode of OP1, then sign extended
7579 to a HOST_WIDE_INT. Using the constant directly can result
7580 in non-canonical RTL in a 64x32 cross compile. */
7581 constant_part
7582 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7583 (HOST_WIDE_INT) 0,
7584 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7585 op0 = plus_constant (op0, INTVAL (constant_part));
7586 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7587 op0 = force_operand (op0, target);
7588 return op0;
7589 }
7590 }
7592 /* No sense saving up arithmetic to be done
7593 if it's all in the wrong mode to form part of an address.
7594 And force_operand won't know whether to sign-extend or
7595 zero-extend.  */
7596 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7597 || mode != ptr_mode)
7598 goto binop;
7600 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7601 subtarget = 0;
7603 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7604 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7606 both_summands:
7607 /* Make sure any term that's a sum with a constant comes last.  */
7608 if (GET_CODE (op0) == PLUS
7609 && CONSTANT_P (XEXP (op0, 1)))
7610 {
7611 temp = op0;
7612 op0 = op1;
7613 op1 = temp;
7614 }
7615 /* If adding to a sum including a constant,
7616 associate it to put the constant outside. */
7617 if (GET_CODE (op1) == PLUS
7618 && CONSTANT_P (XEXP (op1, 1)))
7620 rtx constant_term = const0_rtx;
7622 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7623 if (temp != 0)
7624 op0 = temp;
7625 /* Ensure that MULT comes first if there is one. */
7626 else if (GET_CODE (op0) == MULT)
7627 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7628 else
7629 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7631 /* Let's also eliminate constants from op0 if possible. */
7632 op0 = eliminate_constant_term (op0, &constant_term);
7634 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7635 their sum should be a constant. Form it into OP1, since the
7636 result we want will then be OP0 + OP1. */
7638 temp = simplify_binary_operation (PLUS, mode, constant_term,
7639 XEXP (op1, 1));
7640 if (temp != 0)
7641 op1 = temp;
7642 else
7643 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7644 }
7646 /* Put a constant term last and put a multiplication first. */
7647 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7648 temp = op1, op1 = op0, op0 = temp;
7650 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7651 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
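/* Illustrative example (added commentary, not original source):
   expanding (x + 3) + (y + 4) arrives here as two PLUS rtxes; the
   code above reassociates it to roughly (plus (plus x y) 7), merging
   the two constants into a single term.  */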
7653 case MINUS_EXPR:
7654 /* For initializers, we are allowed to return a MINUS of two
7655 symbolic constants.  Here we handle all cases when both operands
7656 are constant.  */
7657 /* Handle difference of two symbolic constants,
7658 for the sake of an initializer. */
7659 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7660 && really_constant_p (TREE_OPERAND (exp, 0))
7661 && really_constant_p (TREE_OPERAND (exp, 1)))
7663 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7664 VOIDmode, ro_modifier);
7665 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7666 VOIDmode, ro_modifier);
7668 /* If the last operand is a CONST_INT, use plus_constant of
7669 the negated constant. Else make the MINUS. */
7670 if (GET_CODE (op1) == CONST_INT)
7671 return plus_constant (op0, - INTVAL (op1));
7672 else
7673 return gen_rtx_MINUS (mode, op0, op1);
7674 }
7675 /* Convert A - const to A + (-const). */
7676 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7678 tree negated = fold (build1 (NEGATE_EXPR, type,
7679 TREE_OPERAND (exp, 1)));
7681 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7682 /* If we can't negate the constant in TYPE, leave it alone and
7683 expand_binop will negate it for us. We used to try to do it
7684 here in the signed version of TYPE, but that doesn't work
7685 on POINTER_TYPEs. */;
7686 else
7687 {
7688 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7689 goto plus_expr;
7690 }
7691 }
7692 this_optab = ! unsignedp && flag_trapv
7693 && (GET_MODE_CLASS(mode) == MODE_INT)
7694 ? subv_optab : sub_optab;
7695 goto binop;
7697 case MULT_EXPR:
7698 /* If first operand is constant, swap them.
7699 Thus the following special case checks need only
7700 check the second operand. */
7701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7703 register tree t1 = TREE_OPERAND (exp, 0);
7704 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7705 TREE_OPERAND (exp, 1) = t1;
7708 /* Attempt to return something suitable for generating an
7709 indexed address, for machines that support that. */
7711 if (modifier == EXPAND_SUM && mode == ptr_mode
7712 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7713 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7714 {
7715 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7716 EXPAND_SUM);
7718 /* Apply distributive law if OP0 is x+c. */
7719 if (GET_CODE (op0) == PLUS
7720 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7721 return
7722 gen_rtx_PLUS
7723 (mode,
7724 gen_rtx_MULT
7725 (mode, XEXP (op0, 0),
7726 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7727 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7728 * INTVAL (XEXP (op0, 1))));
7730 if (GET_CODE (op0) != REG)
7731 op0 = force_operand (op0, NULL_RTX);
7732 if (GET_CODE (op0) != REG)
7733 op0 = copy_to_mode_reg (mode, op0);
7735 return
7736 gen_rtx_MULT (mode, op0,
7737 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7738 }
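/* Illustrative example (added commentary, not original source): with
   OP0 = (plus x 4) and a constant multiplier of 3, the distributive
   rewrite above yields (plus (mult x 3) 12), a shape that EXPAND_SUM
   callers can keep folding into an address.  */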
7740 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7741 subtarget = 0;
7743 /* Check for multiplying things that have been extended
7744 from a narrower type. If this machine supports multiplying
7745 in that narrower type with a result in the desired type,
7746 do it that way, and avoid the explicit type-conversion. */
7747 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7748 && TREE_CODE (type) == INTEGER_TYPE
7749 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7750 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7751 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7752 && int_fits_type_p (TREE_OPERAND (exp, 1),
7753 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7754 /* Don't use a widening multiply if a shift will do. */
7755 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7756 > HOST_BITS_PER_WIDE_INT)
7757 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7758 ||
7759 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7760 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7761 ==
7762 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7763 /* If both operands are extended, they must either both
7764 be zero-extended or both be sign-extended. */
7765 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7767 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7769 enum machine_mode innermode
7770 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7771 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7772 ? smul_widen_optab : umul_widen_optab);
7773 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7774 ? umul_widen_optab : smul_widen_optab);
7775 if (mode == GET_MODE_WIDER_MODE (innermode))
7777 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7779 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7780 NULL_RTX, VOIDmode, 0);
7781 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7782 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7783 VOIDmode, 0);
7784 else
7785 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7786 NULL_RTX, VOIDmode, 0);
7787 goto binop2;
7788 }
7789 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7790 && innermode == word_mode)
7793 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7794 NULL_RTX, VOIDmode, 0);
7795 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7796 op1 = convert_modes (innermode, mode,
7797 expand_expr (TREE_OPERAND (exp, 1),
7798 NULL_RTX, VOIDmode, 0),
7799 unsignedp);
7800 else
7801 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7802 NULL_RTX, VOIDmode, 0);
7803 temp = expand_binop (mode, other_optab, op0, op1, target,
7804 unsignedp, OPTAB_LIB_WIDEN);
7805 htem = expand_mult_highpart_adjust (innermode,
7806 gen_highpart (innermode, temp),
7807 op0, op1,
7808 gen_highpart (innermode, temp),
7809 unsignedp);
7810 emit_move_insn (gen_highpart (innermode, temp), htem);
7811 return temp;
7812 }
7813 }
7815 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7816 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7817 return expand_mult (mode, op0, op1, target, unsignedp);
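/* Illustrative example (added commentary, not original source): for

       short a, b;  int p = a * b;

   both operands are HImode values extended to SImode, so a target
   providing a mulhisi3 pattern gets the widening multiply directly
   instead of two extensions followed by a full SImode multiply.  */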
7819 case TRUNC_DIV_EXPR:
7820 case FLOOR_DIV_EXPR:
7821 case CEIL_DIV_EXPR:
7822 case ROUND_DIV_EXPR:
7823 case EXACT_DIV_EXPR:
7824 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7825 subtarget = 0;
7826 /* Possible optimization: compute the dividend with EXPAND_SUM
7827 then if the divisor is constant can optimize the case
7828 where some terms of the dividend have coeffs divisible by it. */
7829 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7830 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7831 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7833 case RDIV_EXPR:
7834 this_optab = flodiv_optab;
7835 goto binop;
7837 case TRUNC_MOD_EXPR:
7838 case FLOOR_MOD_EXPR:
7839 case CEIL_MOD_EXPR:
7840 case ROUND_MOD_EXPR:
7841 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7842 subtarget = 0;
7843 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7844 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7845 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7847 case FIX_ROUND_EXPR:
7848 case FIX_FLOOR_EXPR:
7849 case FIX_CEIL_EXPR:
7850 abort ();			/* Not used for C.  */
7852 case FIX_TRUNC_EXPR:
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7854 if (target == 0)
7855 target = gen_reg_rtx (mode);
7856 expand_fix (target, op0, unsignedp);
7857 return target;
7859 case FLOAT_EXPR:
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7861 if (target == 0)
7862 target = gen_reg_rtx (mode);
7863 /* expand_float can't figure out what to do if FROM has VOIDmode.
7864 So give it the correct mode. With -O, cse will optimize this. */
7865 if (GET_MODE (op0) == VOIDmode)
7866 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7867 op0);
7868 expand_float (target, op0,
7869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7870 return target;
7872 case NEGATE_EXPR:
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7874 temp = expand_unop (mode,
7875 ! unsignedp && flag_trapv
7876 && (GET_MODE_CLASS(mode) == MODE_INT)
7877 ? negv_optab : neg_optab, op0, target, 0);
7878 if (temp == 0)
7879 abort ();
7880 return temp;
7882 case ABS_EXPR:
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7885 /* Handle complex values specially. */
7886 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7887 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7888 return expand_complex_abs (mode, op0, target, unsignedp);
7890 /* Unsigned abs is simply the operand. Testing here means we don't
7891 risk generating incorrect code below. */
7892 if (TREE_UNSIGNED (type))
7893 return op0;
7895 return expand_abs (mode, op0, target, unsignedp,
7896 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7898 case MIN_EXPR:
7899 case MAX_EXPR:
7900 target = original_target;
7901 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7902 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7903 || GET_MODE (target) != mode
7904 || (GET_CODE (target) == REG
7905 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7906 target = gen_reg_rtx (mode);
7907 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7910 /* First try to do it with a special MIN or MAX instruction.
7911 If that does not win, use a conditional jump to select the proper
7912 value.  */
7913 this_optab = (TREE_UNSIGNED (type)
7914 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7915 : (code == MIN_EXPR ? smin_optab : smax_optab));
7917 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7918 OPTAB_WIDEN);
7919 if (temp != 0)
7920 return temp;
7922 /* At this point, a MEM target is no longer useful; we will get better
7923 code without it.  */
7925 if (GET_CODE (target) == MEM)
7926 target = gen_reg_rtx (mode);
7928 if (target != op0)
7929 emit_move_insn (target, op0);
7931 op0 = gen_label_rtx ();
7933 /* If this mode is an integer too wide to compare properly,
7934 compare word by word. Rely on cse to optimize constant cases. */
7935 if (GET_MODE_CLASS (mode) == MODE_INT
7936 && ! can_compare_p (GE, mode, ccp_jump))
7938 if (code == MAX_EXPR)
7939 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7940 target, op1, NULL_RTX, op0);
7941 else
7942 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7943 op1, target, NULL_RTX, op0);
7947 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7948 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7949 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7950 op0);
7951 }
7952 emit_move_insn (target, op1);
7953 emit_label (op0);
7954 return target;
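/* Illustrative sketch (added commentary, not original source) of the
   fallback sequence emitted above for MAX_EXPR:

       target = op0;
       if (target >= op1) goto lab;    (or a word-by-word jump)
       target = op1;
     lab:

   i.e. a compare-and-branch that leaves the larger value in TARGET.  */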
7956 case BIT_NOT_EXPR:
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7958 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7959 if (temp == 0)
7960 abort ();
7961 return temp;
7963 case FFS_EXPR:
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7965 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7966 if (temp == 0)
7967 abort ();
7968 return temp;
7970 /* ??? Can optimize bitwise operations with one arg constant.
7971 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7972 and (a bitwise1 b) bitwise2 b (etc)
7973 but that is probably not worth while. */
7975 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7976 boolean values when we want in all cases to compute both of them. In
7977 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7978 as actual zero-or-1 values and then bitwise anding. In cases where
7979 there cannot be any side effects, better code would be made by
7980 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7981 how to recognize those cases. */
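/* Illustrative example (added commentary, not original source): for
   TRUTH_AND_EXPR both operands are evaluated to 0-or-1 values and
   bitwise ANDed, whereas TRUTH_ANDIF_EXPR would branch around the
   second operand entirely when the first is already zero.  */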
7983 case TRUTH_AND_EXPR:
7984 case BIT_AND_EXPR:
7985 this_optab = and_optab;
7986 goto binop;
7988 case TRUTH_OR_EXPR:
7989 case BIT_IOR_EXPR:
7990 this_optab = ior_optab;
7991 goto binop;
7993 case TRUTH_XOR_EXPR:
7994 case BIT_XOR_EXPR:
7995 this_optab = xor_optab;
7996 goto binop;
7998 case LSHIFT_EXPR:
7999 case RSHIFT_EXPR:
8000 case LROTATE_EXPR:
8001 case RROTATE_EXPR:
8002 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8003 subtarget = 0;
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8005 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8006 unsignedp);
8008 /* Could determine the answer when only additive constants differ. Also,
8009 the addition of one can be handled by changing the condition. */
8016 case UNORDERED_EXPR:
8023 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8024 if (temp != 0)
8025 return temp;
8027 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8028 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8029 && original_target
8030 && GET_CODE (original_target) == REG
8031 && (GET_MODE (original_target)
8032 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8034 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8035 VOIDmode, 0);
8037 if (temp != original_target)
8038 temp = copy_to_reg (temp);
8040 op1 = gen_label_rtx ();
8041 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8042 GET_MODE (temp), unsignedp, 0, op1);
8043 emit_move_insn (temp, const1_rtx);
8044 emit_label (op1);
8045 return temp;
8046 }
8048 /* If no set-flag instruction, must generate a conditional
8049 store into a temporary variable. Drop through
8050 and handle this like && and ||. */
8052 case TRUTH_ANDIF_EXPR:
8053 case TRUTH_ORIF_EXPR:
8054 if (! ignore
8055 && (target == 0 || ! safe_from_p (target, exp, 1)
8056 /* Make sure we don't have a hard reg (such as function's return
8057 value) live across basic blocks, if not optimizing. */
8058 || (!optimize && GET_CODE (target) == REG
8059 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8060 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8062 if (target)
8063 emit_clr_insn (target);
8065 op1 = gen_label_rtx ();
8066 jumpifnot (exp, op1);
8068 if (target)
8069 emit_0_to_1_insn (target);
8071 emit_label (op1);
8072 return ignore ? const0_rtx : target;
8074 case TRUTH_NOT_EXPR:
8075 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8076 /* The parser is careful to generate TRUTH_NOT_EXPR
8077 only with operands that are always zero or one. */
8078 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8079 target, 1, OPTAB_LIB_WIDEN);
8080 if (temp == 0)
8081 abort ();
8082 return temp;
8084 case COMPOUND_EXPR:
8085 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8086 emit_queue ();
8087 return expand_expr (TREE_OPERAND (exp, 1),
8088 (ignore ? const0_rtx : target),
8089 VOIDmode, 0);
8091 case COND_EXPR:
8092 /* If we would have a "singleton" (see below) were it not for a
8093 conversion in each arm, bring that conversion back out. */
8094 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8095 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8096 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8097 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8099 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8100 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8102 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8103 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8104 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8105 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8106 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8107 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8108 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8109 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8110 return expand_expr (build1 (NOP_EXPR, type,
8111 build (COND_EXPR, TREE_TYPE (iftrue),
8112 TREE_OPERAND (exp, 0),
8113 iftrue, iffalse)),
8114 target, tmode, modifier);
8115 }
8118 /* Note that COND_EXPRs whose type is a structure or union
8119 are required to be constructed to contain assignments of
8120 a temporary variable, so that we can evaluate them here
8121 for side effect only. If type is void, we must do likewise. */
8123 /* If an arm of the branch requires a cleanup,
8124 only that cleanup is performed. */
8127 tree binary_op = 0, unary_op = 0;
8129 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8130 convert it to our mode, if necessary. */
8131 if (integer_onep (TREE_OPERAND (exp, 1))
8132 && integer_zerop (TREE_OPERAND (exp, 2))
8133 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8137 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8143 if (GET_MODE (op0) == mode)
8147 target = gen_reg_rtx (mode);
8148 convert_move (target, op0, unsignedp);
8152 /* Check for X ? A + B : A. If we have this, we can copy A to the
8153 output and conditionally add B. Similarly for unary operations.
8154 Don't do this if X has side-effects because those side effects
8155 might affect A or B and the "?" operation is a sequence point in
8156 ANSI. (operand_equal_p tests for side effects.) */
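/* For example, in `x ? a + b : a' the ELSE-arm repeats the first
operand of the THEN-arm's binary op, so the checks below set
SINGLETON to A and BINARY_OP to A + B; we can then store A
unconditionally and add B only when X is true.  */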
8158 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8159 && operand_equal_p (TREE_OPERAND (exp, 2),
8160 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8161 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8162 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8163 && operand_equal_p (TREE_OPERAND (exp, 1),
8164 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8165 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8166 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8167 && operand_equal_p (TREE_OPERAND (exp, 2),
8168 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8169 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8170 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8171 && operand_equal_p (TREE_OPERAND (exp, 1),
8172 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8173 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8175 /* If we are not to produce a result, we have no target. Otherwise,
8176 if a target was specified use it; it will not be used as an
8177 intermediate target unless it is safe. If no target, use a
8178 temporary.  */
8182 else if (original_target
8183 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8184 || (singleton && GET_CODE (original_target) == REG
8185 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8186 && original_target == var_rtx (singleton)))
8187 && GET_MODE (original_target) == mode
8188 #ifdef HAVE_conditional_move
8189 && (! can_conditionally_move_p (mode)
8190 || GET_CODE (original_target) == REG
8191 || TREE_ADDRESSABLE (type))
8193 && ! (GET_CODE (original_target) == MEM
8194 && MEM_VOLATILE_P (original_target)))
8195 temp = original_target;
8196 else if (TREE_ADDRESSABLE (type))
8199 temp = assign_temp (type, 0, 0, 1);
8201 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8202 do the test of X as a store-flag operation, do this as
8203 A + ((X != 0) << log C). Similarly for other simple binary
8204 operators. Only do this for C == 1 if BRANCH_COST is low.  */
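/* E.g. for `x ? a + 4 : a' the test X != 0 yields 0 or 1, and
since 4 == 1 << 2 the whole expression can be computed without
a branch as `a + ((x != 0) << 2)'.  */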
8205 if (temp && singleton && binary_op
8206 && (TREE_CODE (binary_op) == PLUS_EXPR
8207 || TREE_CODE (binary_op) == MINUS_EXPR
8208 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8209 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8210 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8211 : integer_onep (TREE_OPERAND (binary_op, 1)))
8212 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8215 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8216 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8217 ? addv_optab : add_optab)
8218 : TREE_CODE (binary_op) == MINUS_EXPR
8219 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8220 ? subv_optab : sub_optab)
8221 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8224 /* If we had X ? A : A + 1, do this as A + (X == 0).
8226 We have to invert the truth value here and then put it
8227 back later if do_store_flag fails. We cannot simply copy
8228 TREE_OPERAND (exp, 0) to another variable and modify that
8229 because invert_truthvalue can modify the tree pointed to
8230 by its argument.  */
8231 if (singleton == TREE_OPERAND (exp, 1))
8232 TREE_OPERAND (exp, 0)
8233 = invert_truthvalue (TREE_OPERAND (exp, 0));
8235 result = do_store_flag (TREE_OPERAND (exp, 0),
8236 (safe_from_p (temp, singleton, 1)
8238 mode, BRANCH_COST <= 1);
8240 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8241 result = expand_shift (LSHIFT_EXPR, mode, result,
8242 build_int_2 (tree_log2
8246 (safe_from_p (temp, singleton, 1)
8247 ? temp : NULL_RTX), 0);
8251 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8252 return expand_binop (mode, boptab, op1, result, temp,
8253 unsignedp, OPTAB_LIB_WIDEN);
8255 else if (singleton == TREE_OPERAND (exp, 1))
8256 TREE_OPERAND (exp, 0)
8257 = invert_truthvalue (TREE_OPERAND (exp, 0));
8260 do_pending_stack_adjust ();
8262 op0 = gen_label_rtx ();
8264 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8268 /* If the target conflicts with the other operand of the
8269 binary op, we can't use it. Also, we can't use the target
8270 if it is a hard register, because evaluating the condition
8271 might clobber it. */
8273 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8274 || (GET_CODE (temp) == REG
8275 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8276 temp = gen_reg_rtx (mode);
8277 store_expr (singleton, temp, 0);
8280 expand_expr (singleton,
8281 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8282 if (singleton == TREE_OPERAND (exp, 1))
8283 jumpif (TREE_OPERAND (exp, 0), op0);
8285 jumpifnot (TREE_OPERAND (exp, 0), op0);
8287 start_cleanup_deferral ();
8288 if (binary_op && temp == 0)
8289 /* Just touch the other operand. */
8290 expand_expr (TREE_OPERAND (binary_op, 1),
8291 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8293 store_expr (build (TREE_CODE (binary_op), type,
8294 make_tree (type, temp),
8295 TREE_OPERAND (binary_op, 1)),
8298 store_expr (build1 (TREE_CODE (unary_op), type,
8299 make_tree (type, temp)),
8303 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8304 comparison operator. If we have one of these cases, set the
8305 output to A, branch on A (cse will merge these two references),
8306 then set the output to FOO. */
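/* For instance, with `x != 0 ? x : y' the code below stores X
into the target, jumps past the ELSE-arm when the condition
holds (branching on the same X that was just stored, so cse
can merge the two references), and otherwise overwrites the
target with Y.  */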
8308 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8309 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8310 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8311 TREE_OPERAND (exp, 1), 0)
8312 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8313 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8314 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8316 if (GET_CODE (temp) == REG
8317 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8318 temp = gen_reg_rtx (mode);
8319 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8320 jumpif (TREE_OPERAND (exp, 0), op0);
8322 start_cleanup_deferral ();
8323 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8327 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8328 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8329 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8330 TREE_OPERAND (exp, 2), 0)
8331 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8332 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8333 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8335 if (GET_CODE (temp) == REG
8336 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8337 temp = gen_reg_rtx (mode);
8338 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8339 jumpifnot (TREE_OPERAND (exp, 0), op0);
8341 start_cleanup_deferral ();
8342 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8347 op1 = gen_label_rtx ();
8348 jumpifnot (TREE_OPERAND (exp, 0), op0);
8350 start_cleanup_deferral ();
8352 /* One branch of the cond can be void, if it never returns. For
8353 example A ? throw : E.  */
8355 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8356 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8358 expand_expr (TREE_OPERAND (exp, 1),
8359 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8360 end_cleanup_deferral ();
8362 emit_jump_insn (gen_jump (op1));
8365 start_cleanup_deferral ();
8367 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8368 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8370 expand_expr (TREE_OPERAND (exp, 2),
8371 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8374 end_cleanup_deferral ();
8385 /* Something needs to be initialized, but we didn't know
8386 where that thing was when building the tree. For example,
8387 it could be the return value of a function, or a parameter
8388 to a function which is passed on the stack, or a temporary
8389 variable which must be passed by reference.
8391 We guarantee that the expression will either be constructed
8392 or copied into our original target. */
8394 tree slot = TREE_OPERAND (exp, 0);
8395 tree cleanups = NULL_TREE;
8398 if (TREE_CODE (slot) != VAR_DECL)
8402 target = original_target;
8404 /* Set this here so that if we get a target that refers to a
8405 register variable that's already been used, put_reg_into_stack
8406 knows that it should fix up those uses. */
8407 TREE_USED (slot) = 1;
8411 if (DECL_RTL_SET_P (slot))
8413 target = DECL_RTL (slot);
8414 /* We have already expanded the slot, so don't do
8415 anything else.  */
8416 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8421 target = assign_temp (type, 2, 0, 1);
8422 /* All temp slots at this level must not conflict. */
8423 preserve_temp_slots (target);
8424 SET_DECL_RTL (slot, target);
8425 if (TREE_ADDRESSABLE (slot))
8426 put_var_into_stack (slot);
8428 /* Since SLOT is not known to the called function
8429 to belong to its stack frame, we must build an explicit
8430 cleanup. This case occurs when we must build up a reference
8431 to pass the reference as an argument. In this case,
8432 it is very likely that such a reference need not be
8433 built here.  */
8435 if (TREE_OPERAND (exp, 2) == 0)
8436 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8437 cleanups = TREE_OPERAND (exp, 2);
8442 /* This case does occur when expanding a parameter which
8443 needs to be constructed on the stack. The target
8444 is the actual stack address that we want to initialize.
8445 The function we call will perform the cleanup in this case. */
8447 /* If we have already assigned it space, use that space,
8448 not the target that we were passed in, as our target
8449 parameter is only a hint. */
8450 if (DECL_RTL_SET_P (slot))
8452 target = DECL_RTL (slot);
8453 /* We have already expanded the slot, so don't do
8454 anything else.  */
8455 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8460 SET_DECL_RTL (slot, target);
8461 /* If we must have an addressable slot, then make sure that
8462 the RTL that we just stored in slot is OK. */
8463 if (TREE_ADDRESSABLE (slot))
8464 put_var_into_stack (slot);
8468 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8469 /* Mark it as expanded. */
8470 TREE_OPERAND (exp, 1) = NULL_TREE;
8472 store_expr (exp1, target, 0);
8474 expand_decl_cleanup (NULL_TREE, cleanups);
8481 tree lhs = TREE_OPERAND (exp, 0);
8482 tree rhs = TREE_OPERAND (exp, 1);
8483 tree noncopied_parts = 0;
8484 tree lhs_type = TREE_TYPE (lhs);
8486 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8487 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8489 = init_noncopied_parts (stabilize_reference (lhs),
8490 TYPE_NONCOPIED_PARTS (lhs_type));
8492 while (noncopied_parts != 0)
8494 expand_assignment (TREE_VALUE (noncopied_parts),
8495 TREE_PURPOSE (noncopied_parts), 0, 0);
8496 noncopied_parts = TREE_CHAIN (noncopied_parts);
8503 /* If lhs is complex, expand calls in rhs before computing it.
8504 That's so we don't compute a pointer and save it over a call.
8505 If lhs is simple, compute it first so we can give it as a
8506 target if the rhs is just a call. This avoids an extra temp and copy
8507 and that prevents a partial-subsumption which makes bad code.
8508 Actually we could treat component_ref's of vars like vars. */
8510 tree lhs = TREE_OPERAND (exp, 0);
8511 tree rhs = TREE_OPERAND (exp, 1);
8512 tree noncopied_parts = 0;
8513 tree lhs_type = TREE_TYPE (lhs);
8517 /* Check for |= or &= of a bitfield of size one into another bitfield
8518 of size 1. In this case, (unless we need the result of the
8519 assignment) we can do this more efficiently with a
8520 test followed by an assignment, if necessary.
8522 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8523 things change so we do, this code should be enhanced to
8524 support it.  */
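/* E.g. with one-bit fields, `s.a |= s.b' becomes
`if (s.b) s.a = 1;' and `s.a &= s.b' becomes
`if (! s.b) s.a = 0;', trading the read-modify-write of the
destination bit for a test and a conditional store.  */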
8526 && TREE_CODE (lhs) == COMPONENT_REF
8527 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8528 || TREE_CODE (rhs) == BIT_AND_EXPR)
8529 && TREE_OPERAND (rhs, 0) == lhs
8530 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8531 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8532 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8534 rtx label = gen_label_rtx ();
8536 do_jump (TREE_OPERAND (rhs, 1),
8537 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8538 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8539 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8540 (TREE_CODE (rhs) == BIT_IOR_EXPR
8542 : integer_zero_node)),
8544 do_pending_stack_adjust ();
8549 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8550 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8552 = save_noncopied_parts (stabilize_reference (lhs),
8553 TYPE_NONCOPIED_PARTS (lhs_type));
8555 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8556 while (noncopied_parts != 0)
8558 expand_assignment (TREE_PURPOSE (noncopied_parts),
8559 TREE_VALUE (noncopied_parts), 0, 0);
8560 noncopied_parts = TREE_CHAIN (noncopied_parts);
8566 if (!TREE_OPERAND (exp, 0))
8567 expand_null_return ();
8569 expand_return (TREE_OPERAND (exp, 0));
8572 case PREINCREMENT_EXPR:
8573 case PREDECREMENT_EXPR:
8574 return expand_increment (exp, 0, ignore);
8576 case POSTINCREMENT_EXPR:
8577 case POSTDECREMENT_EXPR:
8578 /* Faster to treat as pre-increment if result is not used. */
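/* E.g. a statement like `i++;' never uses the old value, so no
copy of it needs to be preserved and the expansion is identical
to that of `++i'.  */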
8579 return expand_increment (exp, ! ignore, ignore);
8582 /* If nonzero, TEMP will be set to the address of something that might
8583 be a MEM corresponding to a stack slot. */
8586 /* Are we taking the address of a nested function? */
8587 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8588 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8589 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8590 && ! TREE_STATIC (exp))
8592 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8593 op0 = force_operand (op0, target);
8595 /* If we are taking the address of something erroneous, just
8596 use zero.  */
8597 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8601 /* We make sure to pass const0_rtx down if we came in with
8602 ignore set, to avoid doing the cleanups twice.  */
8603 op0 = expand_expr (TREE_OPERAND (exp, 0),
8604 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8605 (modifier == EXPAND_INITIALIZER
8606 ? modifier : EXPAND_CONST_ADDRESS));
8608 /* If we are going to ignore the result, OP0 will have been set
8609 to const0_rtx, so just return it. Don't get confused and
8610 think we are taking the address of the constant. */
8614 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8615 clever and return a REG when given a MEM.  */
8616 op0 = protect_from_queue (op0, 1);
8618 /* We would like the object in memory. If it is a constant, we can
8619 have it be statically allocated into memory. For a non-constant,
8620 we need to allocate some memory and store the value into it. */
8622 if (CONSTANT_P (op0))
8623 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8625 else if (GET_CODE (op0) == MEM)
8627 mark_temp_addr_taken (op0);
8628 temp = XEXP (op0, 0);
8631 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8632 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8633 || GET_CODE (op0) == PARALLEL)
8635 /* If this object is in a register, we must copy it into a
8636 memory temporary in order to take its address.  */
8637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8638 tree nt = build_qualified_type (inner_type,
8639 (TYPE_QUALS (inner_type)
8640 | TYPE_QUAL_CONST));
8641 rtx memloc = assign_temp (nt, 1, 1, 1);
8643 mark_temp_addr_taken (memloc);
8644 if (GET_CODE (op0) == PARALLEL)
8645 /* Handle calls that pass values in multiple non-contiguous
8646 locations. The Irix 6 ABI has examples of this. */
8647 emit_group_store (memloc, op0,
8648 int_size_in_bytes (inner_type),
8649 TYPE_ALIGN (inner_type));
8651 emit_move_insn (memloc, op0);
8655 if (GET_CODE (op0) != MEM)
8658 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8660 temp = XEXP (op0, 0);
8661 #ifdef POINTERS_EXTEND_UNSIGNED
8662 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8663 && mode == ptr_mode)
8664 temp = convert_memory_address (ptr_mode, temp);
8669 op0 = force_operand (XEXP (op0, 0), target);
8672 if (flag_force_addr && GET_CODE (op0) != REG)
8673 op0 = force_reg (Pmode, op0);
8675 if (GET_CODE (op0) == REG
8676 && ! REG_USERVAR_P (op0))
8677 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8679 /* If we might have had a temp slot, add an equivalent address
8680 for it.  */
8682 update_temp_slot_address (temp, op0);
8684 #ifdef POINTERS_EXTEND_UNSIGNED
8685 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8686 && mode == ptr_mode)
8687 op0 = convert_memory_address (ptr_mode, op0);
8692 case ENTRY_VALUE_EXPR:
8695 /* COMPLEX type for Extended Pascal & Fortran */
8698 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8701 /* Get the rtx code of the operands. */
8702 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8703 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8706 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8710 /* Move the real (op0) and imaginary (op1) parts to their location. */
8711 emit_move_insn (gen_realpart (mode, target), op0);
8712 emit_move_insn (gen_imagpart (mode, target), op1);
8714 insns = get_insns ();
8717 /* Complex construction should appear as a single unit. */
8718 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8719 each with a separate pseudo as destination.
8720 It's not correct for flow to treat them as a unit. */
8721 if (GET_CODE (target) != CONCAT)
8722 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8730 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8731 return gen_realpart (mode, op0);
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8735 return gen_imagpart (mode, op0);
8739 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8743 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8746 target = gen_reg_rtx (mode);
8750 /* Store the realpart and the negated imagpart to target. */
8751 emit_move_insn (gen_realpart (partmode, target),
8752 gen_realpart (partmode, op0));
8754 imag_t = gen_imagpart (partmode, target);
8755 temp = expand_unop (partmode,
8756 ! unsignedp && flag_trapv
8757 && (GET_MODE_CLASS(partmode) == MODE_INT)
8758 ? negv_optab : neg_optab,
8759 gen_imagpart (partmode, op0), imag_t, 0);
8761 emit_move_insn (imag_t, temp);
8763 insns = get_insns ();
8766 /* Conjugate should appear as a single unit.
8767 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8768 each with a separate pseudo as destination.
8769 It's not correct for flow to treat them as a unit. */
8770 if (GET_CODE (target) != CONCAT)
8771 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8778 case TRY_CATCH_EXPR:
8780 tree handler = TREE_OPERAND (exp, 1);
8782 expand_eh_region_start ();
8784 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8786 expand_eh_region_end_cleanup (handler);
8791 case TRY_FINALLY_EXPR:
8793 tree try_block = TREE_OPERAND (exp, 0);
8794 tree finally_block = TREE_OPERAND (exp, 1);
8795 rtx finally_label = gen_label_rtx ();
8796 rtx done_label = gen_label_rtx ();
8797 rtx return_link = gen_reg_rtx (Pmode);
8798 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8799 (tree) finally_label, (tree) return_link);
8800 TREE_SIDE_EFFECTS (cleanup) = 1;
8802 /* Start a new binding layer that will keep track of all cleanup
8803 actions to be performed. */
8804 expand_start_bindings (2);
8806 target_temp_slot_level = temp_slot_level;
8808 expand_decl_cleanup (NULL_TREE, cleanup);
8809 op0 = expand_expr (try_block, target, tmode, modifier);
8811 preserve_temp_slots (op0);
8812 expand_end_bindings (NULL_TREE, 0, 0);
8813 emit_jump (done_label);
8814 emit_label (finally_label);
8815 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8816 emit_indirect_jump (return_link);
8817 emit_label (done_label);
8821 case GOTO_SUBROUTINE_EXPR:
8823 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8824 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8825 rtx return_address = gen_label_rtx ();
8826 emit_move_insn (return_link,
8827 gen_rtx_LABEL_REF (Pmode, return_address));
8829 emit_label (return_address);
8834 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8837 return get_exception_pointer (cfun);
8840 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8843 /* Here to do an ordinary binary operator, generating an instruction
8844 from the optab already placed in `this_optab'. */
8846 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8848 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8849 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8851 temp = expand_binop (mode, this_optab, op0, op1, target,
8852 unsignedp, OPTAB_LIB_WIDEN);
8858 /* Similar to expand_expr, except that we don't specify a target, target
8859 mode, or modifier and we return the alignment of the inner type. This is
8860 used in cases where it is not necessary to align the result to the
8861 alignment of its type as long as we know the alignment of the result, for
8862 example for comparisons of BLKmode values. */
8865 expand_expr_unaligned (exp, palign)
8867 unsigned int *palign;
8870 tree type = TREE_TYPE (exp);
8871 register enum machine_mode mode = TYPE_MODE (type);
8873 /* Default the alignment we return to that of the type. */
8874 *palign = TYPE_ALIGN (type);
8876 /* The only case in which we do anything special is if the resulting mode
8877 is BLKmode.  */
8878 if (mode != BLKmode)
8879 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8881 switch (TREE_CODE (exp))
8885 case NON_LVALUE_EXPR:
8886 /* Conversions between BLKmode values don't change the underlying
8887 alignment or value. */
8888 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8889 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8893 /* Much of the code for this case is copied directly from expand_expr.
8894 We need to duplicate it here because we will do something different
8895 in the fall-through case, so we need to handle the same exceptions
8896 it does.  */
8898 tree array = TREE_OPERAND (exp, 0);
8899 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8900 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8901 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8904 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8907 /* Optimize the special-case of a zero lower bound.
8909 We convert the low_bound to sizetype to avoid some problems
8910 with constant folding. (E.g. suppose the lower bound is 1,
8911 and its mode is QI. Without the conversion, (ARRAY
8912 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8913 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8915 if (! integer_zerop (low_bound))
8916 index = size_diffop (index, convert (sizetype, low_bound));
8918 /* If this is a constant index into a constant array,
8919 just get the value from the array. Handle both the cases when
8920 we have an explicit constructor and when our operand is a variable
8921 that was declared const. */
8923 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8924 && host_integerp (index, 0)
8925 && 0 > compare_tree_int (index,
8926 list_length (CONSTRUCTOR_ELTS
8927 (TREE_OPERAND (exp, 0)))))
8931 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8932 i = tree_low_cst (index, 0);
8933 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8937 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8940 else if (optimize >= 1
8941 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8942 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8943 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8945 if (TREE_CODE (index) == INTEGER_CST)
8947 tree init = DECL_INITIAL (array);
8949 if (TREE_CODE (init) == CONSTRUCTOR)
8953 for (elem = CONSTRUCTOR_ELTS (init);
8954 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8955 elem = TREE_CHAIN (elem))
8959 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8969 case ARRAY_RANGE_REF:
8970 /* If the operand is a CONSTRUCTOR, we can just extract the
8971 appropriate field if it is present. Don't do this if we have
8972 already written the data since we want to refer to that copy
8973 and varasm.c assumes that's what we'll do. */
8974 if (TREE_CODE (exp) == COMPONENT_REF
8975 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8976 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8980 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8981 elt = TREE_CHAIN (elt))
8982 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8983 /* Note that unlike the case in expand_expr, we know this is
8984 BLKmode and hence not an integer. */
8985 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8989 enum machine_mode mode1;
8990 HOST_WIDE_INT bitsize, bitpos;
8993 unsigned int alignment;
8995 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8996 &mode1, &unsignedp, &volatilep,
8999 /* If we got back the original object, something is wrong. Perhaps
9000 we are evaluating an expression too early. In any event, don't
9001 infinitely recurse. */
9005 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9007 /* If this is a constant, put it into a register if it is a
9008 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
9009 if (CONSTANT_P (op0))
9011 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9013 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9015 op0 = force_reg (inner_mode, op0);
9017 op0 = validize_mem (force_const_mem (inner_mode, op0));
9022 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9024 /* If this object is in a register, put it into memory.
9025 This case can't occur in C, but can in Ada if we have
9026 unchecked conversion of an expression from a scalar type to
9027 an array or record type. */
9028 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9029 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9031 tree nt = build_qualified_type (TREE_TYPE (tem),
9032 (TYPE_QUALS (TREE_TYPE (tem))
9033 | TYPE_QUAL_CONST));
9034 rtx memloc = assign_temp (nt, 1, 1, 1);
9036 mark_temp_addr_taken (memloc);
9037 emit_move_insn (memloc, op0);
9041 if (GET_CODE (op0) != MEM)
9044 if (GET_MODE (offset_rtx) != ptr_mode)
9046 #ifdef POINTERS_EXTEND_UNSIGNED
9047 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9049 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9053 op0 = change_address (op0, VOIDmode,
9054 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9055 force_reg (ptr_mode,
9059 /* Don't forget about volatility even if this is a bitfield. */
9060 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9062 op0 = copy_rtx (op0);
9063 MEM_VOLATILE_P (op0) = 1;
9066 /* Check the access. */
9067 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9072 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9073 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9075 /* Check the access right of the pointer. */
9076 in_check_memory_usage = 1;
9077 if (size > BITS_PER_UNIT)
9078 emit_library_call (chkr_check_addr_libfunc,
9079 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9080 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9081 TYPE_MODE (sizetype),
9082 GEN_INT (MEMORY_USE_RO),
9083 TYPE_MODE (integer_type_node));
9084 in_check_memory_usage = 0;
9087 /* In cases where an aligned union has an unaligned object
9088 as a field, we might be extracting a BLKmode value from
9089 an integer-mode (e.g., SImode) object. Handle this case
9090 by doing the extract into an object as wide as the field
9091 (which we know to be the width of a basic mode), then
9092 storing into memory, and changing the mode to BLKmode.
9093 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9094 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9095 if (mode1 == VOIDmode
9096 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9097 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9098 && (TYPE_ALIGN (type) > alignment
9099 || bitpos % TYPE_ALIGN (type) != 0)))
9101 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9103 if (ext_mode == BLKmode)
9105 /* In this case, BITPOS must start at a byte boundary. */
9106 if (GET_CODE (op0) != MEM
9107 || bitpos % BITS_PER_UNIT != 0)
9110 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9114 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9116 rtx new = assign_temp (nt, 0, 1, 1);
9118 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9119 unsignedp, NULL_RTX, ext_mode,
9120 ext_mode, alignment,
9121 int_size_in_bytes (TREE_TYPE (tem)));
9123 /* If the result is a record type and BITSIZE is narrower than
9124 the mode of OP0, an integral mode, and this is a big endian
9125 machine, we must put the field into the high-order bits. */
9126 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9127 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9128 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9129 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9130 size_int (GET_MODE_BITSIZE
9135 emit_move_insn (new, op0);
9136 op0 = copy_rtx (new);
9137 PUT_MODE (op0, BLKmode);
9141 /* Get a reference to just this component. */
9142 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9144 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9146 /* Adjust the alignment in case the bit position is not
9147 a multiple of the alignment of the inner object. */
9148 while (bitpos % alignment != 0)
9151 if (GET_CODE (XEXP (op0, 0)) == REG)
9152 mark_reg_pointer (XEXP (op0, 0), alignment);
9154 MEM_IN_STRUCT_P (op0) = 1;
9155 MEM_VOLATILE_P (op0) |= volatilep;
9157 *palign = alignment;
9166 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9169 /* Return the tree node if ARG corresponds to a string constant or zero
9170 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9171 in bytes within the string that ARG is accessing. The type of the
9172 offset will be `sizetype'. */
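/* For example, given the tree for `"hello" + 2' this returns the
STRING_CST for "hello" and sets *PTR_OFFSET to 2; a PLUS_EXPR
with the string address in either operand is handled.  */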
9175 string_constant (arg, ptr_offset)
9181 if (TREE_CODE (arg) == ADDR_EXPR
9182 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9184 *ptr_offset = size_zero_node;
9185 return TREE_OPERAND (arg, 0);
9187 else if (TREE_CODE (arg) == PLUS_EXPR)
9189 tree arg0 = TREE_OPERAND (arg, 0);
9190 tree arg1 = TREE_OPERAND (arg, 1);
9195 if (TREE_CODE (arg0) == ADDR_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9198 *ptr_offset = convert (sizetype, arg1);
9199 return TREE_OPERAND (arg0, 0);
9201 else if (TREE_CODE (arg1) == ADDR_EXPR
9202 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9204 *ptr_offset = convert (sizetype, arg0);
9205 return TREE_OPERAND (arg1, 0);
9212 /* Expand code for a post- or pre- increment or decrement
9213 and return the RTX for the result.
9214 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9217 expand_increment (exp, post, ignore)
9221 register rtx op0, op1;
9222 register rtx temp, value;
9223 register tree incremented = TREE_OPERAND (exp, 0);
9224 optab this_optab = add_optab;
9226 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9227 int op0_is_copy = 0;
9228 int single_insn = 0;
9229 /* 1 means we can't store into OP0 directly,
9230 because it is a subreg narrower than a word,
9231 and we don't dare clobber the rest of the word. */
9234 /* Stabilize any component ref that might need to be
9235 evaluated more than once below. */
9237 || TREE_CODE (incremented) == BIT_FIELD_REF
9238 || (TREE_CODE (incremented) == COMPONENT_REF
9239 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9240 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9241 incremented = stabilize_reference (incremented);
9242 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9243 ones into save exprs so that they don't accidentally get evaluated
9244 more than once by the code below. */
9245 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9246 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9247 incremented = save_expr (incremented);
9249 /* Compute the operands as RTX.
9250 Note whether OP0 is the actual lvalue or a copy of it:
9251 I believe it is a copy iff it is a register or subreg
9252 and insns were generated in computing it. */
9254 temp = get_last_insn ();
9255 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9257 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9258 in place but instead must do sign- or zero-extension during assignment,
9259 so we copy it into a new register and let the code below use it as
9260 a copy.
9262 Note that we can safely modify this SUBREG since it is known not to be
9263 shared (it was made by the expand_expr call above). */
9265 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9268 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9272 else if (GET_CODE (op0) == SUBREG
9273 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9275 /* We cannot increment this SUBREG in place. If we are
9276 post-incrementing, get a copy of the old value. Otherwise,
9277 just mark that we cannot increment in place. */
9279 op0 = copy_to_reg (op0);
9284 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9285 && temp != get_last_insn ());
9286 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9287 EXPAND_MEMORY_USE_BAD);
9289 /* Decide whether incrementing or decrementing. */
9290 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9291 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9292 this_optab = sub_optab;
9294 /* Convert decrement by a constant into a negative increment. */
9295 if (this_optab == sub_optab
9296 && GET_CODE (op1) == CONST_INT)
9298 op1 = GEN_INT (-INTVAL (op1));
9299 this_optab = add_optab;
9302 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9303 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9305 /* For a preincrement, see if we can do this with a single instruction. */
9308 icode = (int) this_optab->handlers[(int) mode].insn_code;
9309 if (icode != (int) CODE_FOR_nothing
9310 /* Make sure that OP0 is valid for operands 0 and 1
9311 of the insn we want to queue. */
9312 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9313 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9314 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9318 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9319 then we cannot just increment OP0. We must therefore contrive to
9320 increment the original value. Then, for postincrement, we can return
9321 OP0 since it is a copy of the old value. For preincrement, expand here
9322 unless we can do it with a single insn.
9324 Likewise if storing directly into OP0 would clobber high bits
9325 we need to preserve (bad_subreg). */
9326 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9328 /* This is the easiest way to increment the value wherever it is.
9329 Problems with multiple evaluation of INCREMENTED are prevented
9330 because either (1) it is a component_ref or preincrement,
9331 in which case it was stabilized above, or (2) it is an array_ref
9332 with constant index in an array in a register, which is
9333 safe to reevaluate. */
9334 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9335 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9336 ? MINUS_EXPR : PLUS_EXPR),
9339 TREE_OPERAND (exp, 1));
9341 while (TREE_CODE (incremented) == NOP_EXPR
9342 || TREE_CODE (incremented) == CONVERT_EXPR)
9344 newexp = convert (TREE_TYPE (incremented), newexp);
9345 incremented = TREE_OPERAND (incremented, 0);
9348 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9349 return post ? op0 : temp;
9354 /* We have a true reference to the value in OP0.
9355 If there is an insn to add or subtract in this mode, queue it.
9356 Queueing the increment insn avoids the register shuffling
9357 that often results if we must increment now and first save
9358 the old value for subsequent use. */
9360 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9361 op0 = stabilize (op0);
9364 icode = (int) this_optab->handlers[(int) mode].insn_code;
9365 if (icode != (int) CODE_FOR_nothing
9366 /* Make sure that OP0 is valid for operands 0 and 1
9367 of the insn we want to queue. */
9368 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9369 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9371 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9372 op1 = force_reg (mode, op1);
9374 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9376 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9378 rtx addr = (general_operand (XEXP (op0, 0), mode)
9379 ? force_reg (Pmode, XEXP (op0, 0))
9380 : copy_to_reg (XEXP (op0, 0)));
9383 op0 = replace_equiv_address (op0, addr);
9384 temp = force_reg (GET_MODE (op0), op0);
9385 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9386 op1 = force_reg (mode, op1);
9388 /* The increment queue is LIFO, thus we have to `queue'
9389 the instructions in reverse order. */
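/* Being LIFO, the queue runs the entry queued last first: TEMP
is incremented and only afterwards copied back into OP0.  */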
9390 enqueue_insn (op0, gen_move_insn (op0, temp));
9391 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9396 /* Preincrement, or we can't increment with one simple insn. */
9398 /* Save a copy of the value before inc or dec, to return it later. */
9399 temp = value = copy_to_reg (op0);
9401 /* Arrange to return the incremented value. */
9402 /* Copy the rtx because expand_binop will protect from the queue,
9403 and the results of that would be invalid for us to return
9404 if our caller does emit_queue before using our result. */
9405 temp = copy_rtx (value = op0);
9407 /* Increment however we can. */
9408 op1 = expand_binop (mode, this_optab, value, op1,
9409 current_function_check_memory_usage ? NULL_RTX : op0,
9410 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9411 /* Make sure the value is stored into OP0. */
9413 emit_move_insn (op0, op1);
9418 /* At the start of a function, record that we have no previously-pushed
9419 arguments waiting to be popped. */
9422 init_pending_stack_adjust ()
9424 pending_stack_adjust = 0;
9427 /* When exiting from function, if safe, clear out any pending stack adjust
9428 so the adjustment won't get done.
9430 Note, if the current function calls alloca, then it must have a
9431 frame pointer regardless of the value of flag_omit_frame_pointer. */
9434 clear_pending_stack_adjust ()
9436 #ifdef EXIT_IGNORE_STACK
9438 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9439 && EXIT_IGNORE_STACK
9440 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9441 && ! flag_inline_functions)
9443 stack_pointer_delta -= pending_stack_adjust,
9444 pending_stack_adjust = 0;
9449 /* Pop any previously-pushed arguments that have not been popped yet. */
9452 do_pending_stack_adjust ()
9454 if (inhibit_defer_pop == 0)
9456 if (pending_stack_adjust != 0)
9457 adjust_stack (GEN_INT (pending_stack_adjust));
9458 pending_stack_adjust = 0;
9462 /* Expand conditional expressions. */
9464 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9465 LABEL is an rtx of code CODE_LABEL, in this function and all the
9466 functions here.  */
9469 jumpifnot (exp, label)
9473 do_jump (exp, label, NULL_RTX);
9476 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9483 do_jump (exp, NULL_RTX, label);
9486 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9487 the result is zero, or IF_TRUE_LABEL if the result is one.
9488 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9489 meaning fall through in that case.
9491 do_jump always does any pending stack adjust except when it does not
9492 actually perform a jump. An example where there is no jump
9493 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9495 This function is responsible for optimizing cases such as
9496 &&, || and comparison operators in EXP. */
9499 do_jump (exp, if_false_label, if_true_label)
9501 rtx if_false_label, if_true_label;
9503 register enum tree_code code = TREE_CODE (exp);
9504 /* Some cases need to create a label to jump to
9505 in order to properly fall through.
9506 These cases set DROP_THROUGH_LABEL nonzero. */
9507 rtx drop_through_label = 0;
9511 enum machine_mode mode;
9513 #ifdef MAX_INTEGER_COMPUTATION_MODE
9514 check_max_integer_computation_mode (exp);
9525 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9531 /* This is not true with #pragma weak */
9533 /* The address of something can never be zero. */
9535 emit_jump (if_true_label);
9540 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9541 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9542 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9543 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9546 /* If we are narrowing the operand, we have to do the compare in the
9547 narrower mode.  */
9548 if ((TYPE_PRECISION (TREE_TYPE (exp))
9549 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9551 case NON_LVALUE_EXPR:
9552 case REFERENCE_EXPR:
9557 /* These cannot change zero->non-zero or vice versa. */
9558 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9561 case WITH_RECORD_EXPR:
9562 /* Put the object on the placeholder list, recurse through our first
9563 operand, and pop the list. */
9564 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9566 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9567 placeholder_list = TREE_CHAIN (placeholder_list);
9571 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9572 a test and can be longer if the test is eliminated. */
9574 /* Reduce to minus. */
9575 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9576 TREE_OPERAND (exp, 0),
9577 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9578 TREE_OPERAND (exp, 1))));
9579 /* Process as MINUS. */
9583 /* Non-zero iff operands of minus differ. */
9584 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9585 TREE_OPERAND (exp, 0),
9586 TREE_OPERAND (exp, 1)),
9587 NE, NE, if_false_label, if_true_label);
9591 /* If we are AND'ing with a small constant, do this comparison in the
9592 smallest type that fits. If the machine doesn't have comparisons
9593 that small, it will be converted back to the wider comparison.
9594 This helps if we are testing the sign bit of a narrower object.
9595 combine can't do this for us because it can't know whether a
9596 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
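/* E.g. for `if (x & 0x80)' with X a full-width int,
tree_floor_log2 returns 7, an 8-bit integer mode holds bits
0..7, and if the machine can compare in that mode we test the
sign bit of the low byte instead of the whole word.  */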
9598 if (! SLOW_BYTE_ACCESS
9599 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9600 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9601 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9602 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9603 && (type = type_for_mode (mode, 1)) != 0
9604 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9605 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9606 != CODE_FOR_nothing))
9608 do_jump (convert (type, exp), if_false_label, if_true_label);
9613 case TRUTH_NOT_EXPR:
9614 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9617 case TRUTH_ANDIF_EXPR:
9618 if (if_false_label == 0)
9619 if_false_label = drop_through_label = gen_label_rtx ();
9620 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9621 start_cleanup_deferral ();
9622 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9623 end_cleanup_deferral ();
9626 case TRUTH_ORIF_EXPR:
9627 if (if_true_label == 0)
9628 if_true_label = drop_through_label = gen_label_rtx ();
9629 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9630 start_cleanup_deferral ();
9631 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9632 end_cleanup_deferral ();
9637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9638 preserve_temp_slots (NULL_RTX);
9642 do_pending_stack_adjust ();
9643 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9649 case ARRAY_RANGE_REF:
9651 HOST_WIDE_INT bitsize, bitpos;
9653 enum machine_mode mode;
9657 unsigned int alignment;
9659 /* Get description of this reference. We don't actually care
9660 about the underlying object here. */
9661 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9662 &unsignedp, &volatilep, &alignment);
9664 type = type_for_size (bitsize, unsignedp);
9665 if (! SLOW_BYTE_ACCESS
9666 && type != 0 && bitsize >= 0
9667 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9668 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9669 != CODE_FOR_nothing))
9671 do_jump (convert (type, exp), if_false_label, if_true_label);
9678 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9679 if (integer_onep (TREE_OPERAND (exp, 1))
9680 && integer_zerop (TREE_OPERAND (exp, 2)))
9681 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9683 else if (integer_zerop (TREE_OPERAND (exp, 1))
9684 && integer_onep (TREE_OPERAND (exp, 2)))
9685 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9689 register rtx label1 = gen_label_rtx ();
9690 drop_through_label = gen_label_rtx ();
9692 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9694 start_cleanup_deferral ();
9695 /* Now the THEN-expression. */
9696 do_jump (TREE_OPERAND (exp, 1),
9697 if_false_label ? if_false_label : drop_through_label,
9698 if_true_label ? if_true_label : drop_through_label);
9699 /* In case the do_jump just above never jumps. */
9700 do_pending_stack_adjust ();
9701 emit_label (label1);
9703 /* Now the ELSE-expression. */
9704 do_jump (TREE_OPERAND (exp, 2),
9705 if_false_label ? if_false_label : drop_through_label,
9706 if_true_label ? if_true_label : drop_through_label);
9707 end_cleanup_deferral ();
9713 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9715 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9716 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9718 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9719 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9722 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9723 fold (build (EQ_EXPR, TREE_TYPE (exp),
9724 fold (build1 (REALPART_EXPR,
9725 TREE_TYPE (inner_type),
9727 fold (build1 (REALPART_EXPR,
9728 TREE_TYPE (inner_type),
9730 fold (build (EQ_EXPR, TREE_TYPE (exp),
9731 fold (build1 (IMAGPART_EXPR,
9732 TREE_TYPE (inner_type),
9734 fold (build1 (IMAGPART_EXPR,
9735 TREE_TYPE (inner_type),
9737 if_false_label, if_true_label);
9740 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9741 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9743 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9744 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9745 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9747 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9753 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9755 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9756 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9758 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9759 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9762 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9763 fold (build (NE_EXPR, TREE_TYPE (exp),
9764 fold (build1 (REALPART_EXPR,
9765 TREE_TYPE (inner_type),
9767 fold (build1 (REALPART_EXPR,
9768 TREE_TYPE (inner_type),
9770 fold (build (NE_EXPR, TREE_TYPE (exp),
9771 fold (build1 (IMAGPART_EXPR,
9772 TREE_TYPE (inner_type),
9774 fold (build1 (IMAGPART_EXPR,
9775 TREE_TYPE (inner_type),
9777 if_false_label, if_true_label);
9780 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9781 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9783 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9784 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9785 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9787 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9792 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9793 if (GET_MODE_CLASS (mode) == MODE_INT
9794 && ! can_compare_p (LT, mode, ccp_jump))
9795 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9797 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9801 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9802 if (GET_MODE_CLASS (mode) == MODE_INT
9803 && ! can_compare_p (LE, mode, ccp_jump))
9804 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9806 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9811 if (GET_MODE_CLASS (mode) == MODE_INT
9812 && ! can_compare_p (GT, mode, ccp_jump))
9813 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9815 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9819 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9820 if (GET_MODE_CLASS (mode) == MODE_INT
9821 && ! can_compare_p (GE, mode, ccp_jump))
9822 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9824 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9827 case UNORDERED_EXPR:
9830 enum rtx_code cmp, rcmp;
9833 if (code == UNORDERED_EXPR)
9834 cmp = UNORDERED, rcmp = ORDERED;
9836 cmp = ORDERED, rcmp = UNORDERED;
9837 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9840 if (! can_compare_p (cmp, mode, ccp_jump)
9841 && (can_compare_p (rcmp, mode, ccp_jump)
9842 /* If the target doesn't provide either UNORDERED or ORDERED
9843 comparisons, canonicalize on UNORDERED for the library. */
9844 || rcmp == UNORDERED))
9848 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9850 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9855 enum rtx_code rcode1;
9856 enum tree_code tcode2;
9880 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9881 if (can_compare_p (rcode1, mode, ccp_jump))
9882 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9886 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9887 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9890 /* If the target doesn't support combined unordered
9891 compares, decompose into UNORDERED + comparison. */
9892 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9893 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9894 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9895 do_jump (exp, if_false_label, if_true_label);
9901 __builtin_expect (<test>, 0) and
9902 __builtin_expect (<test>, 1)
9904 We need to do this here, so that <test> is not converted to an SCC
9905 operation on machines that use condition code registers and COMPARE
9906 like the PowerPC, and then the jump is done based on whether the SCC
9907 operation produced a 1 or 0. */
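/* E.g. in `if (__builtin_expect (p != 0, 1))' the inner test
`p != 0' should feed the conditional jump directly instead of
first being materialized as a 0/1 value in a register.  */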
9909 /* Check for a built-in function. */
9910 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9912 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9913 tree arglist = TREE_OPERAND (exp, 1);
9915 if (TREE_CODE (fndecl) == FUNCTION_DECL
9916 && DECL_BUILT_IN (fndecl)
9917 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9918 && arglist != NULL_TREE
9919 && TREE_CHAIN (arglist) != NULL_TREE)
9921 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9924 if (seq != NULL_RTX)
9931 /* fall through and generate the normal code. */
9935 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9937 /* This is not needed any more and causes poor code since it causes
9938 comparisons and tests from non-SI objects to have different code
9939 patterns.  */
9940 /* Copy to register to avoid generating bad insns by cse
9941 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9942 if (!cse_not_expected && GET_CODE (temp) == MEM)
9943 temp = copy_to_reg (temp);
9945 do_pending_stack_adjust ();
9946 /* Do any postincrements in the expression that was tested. */
9949 if (GET_CODE (temp) == CONST_INT
9950 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9951 || GET_CODE (temp) == LABEL_REF)
9953 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9957 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9958 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9959 /* Note swapping the labels gives us not-equal. */
9960 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9961 else if (GET_MODE (temp) != VOIDmode)
9962 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9963 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9964 GET_MODE (temp), NULL_RTX, 0,
9965 if_false_label, if_true_label);
9970 if (drop_through_label)
9972 /* If do_jump produces code that might be jumped around,
9973 do any stack adjusts from that code, before the place
9974 where control merges in. */
9975 do_pending_stack_adjust ();
9976 emit_label (drop_through_label);
9980 /* Given a comparison expression EXP for values too wide to be compared
9981 with one insn, test the comparison and jump to the appropriate label.
9982 The code of EXP is ignored; we always test GT if SWAP is 0,
9983 and LT if SWAP is 1. */
9986 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9989 rtx if_false_label, if_true_label;
9991 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9992 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9993 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9994 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9996 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9999 /* Compare OP0 with OP1, word at a time, in mode MODE.
10000 UNSIGNEDP says to do unsigned comparison.
10001 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
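/* For a double-word compare this first tests the high words:
greater jumps to IF_TRUE_LABEL, unequal (hence less) jumps to
IF_FALSE_LABEL.  Only when the high words are equal do we fall
through to the low words, which always compare unsigned.  */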
10004 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10005 enum machine_mode mode;
10008 rtx if_false_label, if_true_label;
10010 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10011 rtx drop_through_label = 0;
10014 if (! if_true_label || ! if_false_label)
10015 drop_through_label = gen_label_rtx ();
10016 if (! if_true_label)
10017 if_true_label = drop_through_label;
10018 if (! if_false_label)
10019 if_false_label = drop_through_label;
10021 /* Compare a word at a time, high order first. */
10022 for (i = 0; i < nwords; i++)
10024 rtx op0_word, op1_word;
10026 if (WORDS_BIG_ENDIAN)
10028 op0_word = operand_subword_force (op0, i, mode);
10029 op1_word = operand_subword_force (op1, i, mode);
10033 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10034 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10037 /* All but the high-order word must be compared as unsigned.  */
10038 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10039 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10040 NULL_RTX, if_true_label);
10042 /* Consider lower words only if these are equal. */
10043 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10044 NULL_RTX, 0, NULL_RTX, if_false_label);
10047 if (if_false_label)
10048 emit_jump (if_false_label);
10049 if (drop_through_label)
10050 emit_label (drop_through_label);
10053 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10054 with one insn, test the comparison and jump to the appropriate label. */
10057 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10059 rtx if_false_label, if_true_label;
10061 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10062 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10063 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10064 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10066 rtx drop_through_label = 0;
10068 if (! if_false_label)
10069 drop_through_label = if_false_label = gen_label_rtx ();
10071 for (i = 0; i < nwords; i++)
10072 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10073 operand_subword_force (op1, i, mode),
10074 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10075 word_mode, NULL_RTX, 0, if_false_label,
10079 emit_jump (if_true_label);
10080 if (drop_through_label)
10081 emit_label (drop_through_label);
10084 /* Jump according to whether OP0 is 0.
10085 We assume that OP0 has an integer mode that is too wide
10086 for the available compare insns. */
10089 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10090 rtx op0;
10091 rtx if_false_label, if_true_label;
10092 {
10093 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10094 rtx part;
10095 int i;
10096 rtx drop_through_label = 0;
10098 /* The fastest way of doing this comparison on almost any machine is to
10099 "or" all the words and compare the result. If all have to be loaded
10100 from memory and this is a very wide item, it's possible this may
10101 be slower, but that's highly unlikely. */
10103 part = gen_reg_rtx (word_mode);
10104 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10105 for (i = 1; i < nwords && part != 0; i++)
10106 part = expand_binop (word_mode, ior_optab, part,
10107 operand_subword_force (op0, i, GET_MODE (op0)),
10108 part, 1, OPTAB_WIDEN);
10110 if (part != 0)
10111 {
10112 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10113 NULL_RTX, 0, if_false_label, if_true_label);
10115 return;
10116 }
10118 /* If we couldn't do the "or" simply, do this with a series of compares. */
10119 if (! if_false_label)
10120 drop_through_label = if_false_label = gen_label_rtx ();
10122 for (i = 0; i < nwords; i++)
10123 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10124 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10125 if_false_label, NULL_RTX);
10127 if (if_true_label)
10128 emit_jump (if_true_label);
10130 if (drop_through_label)
10131 emit_label (drop_through_label);
10132 }
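/* Editor's illustrative sketch (assumed example, not part of the
   original source): the preferred OR-reduction above, as plain C.
   One IOR per extra word replaces NWORDS separate compares with a
   single compare against zero.  Hypothetical names.  */
#if 0
static int
multiword_is_zero (const unsigned long *op0, int nwords)
{
  unsigned long part = op0[0];
  int i;
  for (i = 1; i < nwords; i++)
    part |= op0[i];
  return part == 0;		/* one compare decides the jump */
}
#endif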
10134 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10135 (including code to compute the values to be compared) and set
10136 (CC0) according to the result.
10137 The decision as to signed or unsigned comparison must be made by the caller.
10139 We force a stack adjustment unless there are currently
10140 things pushed on the stack that aren't yet used.
10142 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10143 compared.
10145 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10146 size of MODE should be used. */
10149 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10150 register rtx op0, op1;
10151 enum rtx_code code;
10152 int unsignedp;
10153 enum machine_mode mode;
10154 rtx size;
10155 unsigned int align;
10156 {
10157 rtx tem;
10159 /* If one operand is constant, make it the second one. Only do this
10160 if the other operand is not constant as well. */
10162 if (swap_commutative_operands_p (op0, op1))
10163 {
10164 tem = op0;
10165 op0 = op1;
10166 op1 = tem;
10167 code = swap_condition (code);
10168 }
10170 if (flag_force_mem)
10171 {
10172 op0 = force_not_mem (op0);
10173 op1 = force_not_mem (op1);
10174 }
10176 do_pending_stack_adjust ();
10178 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10179 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10180 return tem;
10182 #if 0
10183 /* There's no need to do this now that combine.c can eliminate lots of
10184 sign extensions. This can be less efficient in certain cases on other
10185 machines.  */
10187 /* If this is a signed equality comparison, we can do it as an
10188 unsigned comparison since zero-extension is cheaper than sign
10189 extension and comparisons with zero are done as unsigned. This is
10190 the case even on machines that can do fast sign extension, since
10191 zero-extension is easier to combine with other operations than
10192 sign-extension is. If we are comparing against a constant, we must
10193 convert it to what it would look like unsigned. */
10194 if ((code == EQ || code == NE) && ! unsignedp
10195 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10196 {
10197 if (GET_CODE (op1) == CONST_INT
10198 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10199 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10200 unsignedp = 1;
10201 }
10202 #endif
10204 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10206 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10207 }
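/* Editor's illustrative sketch (assumed example, not part of the
   original source): why the disabled transformation above is sound.
   Equality of bit patterns does not depend on signedness, so a signed
   EQ/NE may be done unsigned once any constant is masked to the
   operand's mode; e.g. (unsigned short) -1 == 0xffff.  */
#if 0
static int
eq_signed_vs_unsigned (short a, short b)
{
  /* Signed and unsigned equality agree because both look at the same
     bit patterns; only ordered comparisons depend on the sign.  */
  return (a == b) == ((unsigned short) a == (unsigned short) b);
}
#endif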
10209 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10210 The decision as to signed or unsigned comparison must be made by the caller.
10212 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10213 compared.
10215 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10216 size of MODE should be used. */
10219 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10220 if_false_label, if_true_label)
10221 register rtx op0, op1;
10222 enum rtx_code code;
10223 int unsignedp;
10224 enum machine_mode mode;
10225 rtx size;
10226 unsigned int align;
10227 rtx if_false_label, if_true_label;
10228 {
10229 rtx tem;
10230 int dummy_true_label = 0;
10232 /* Reverse the comparison if that is safe and we want to jump if it is
10233 false.  */
10234 if (! if_true_label && ! FLOAT_MODE_P (mode))
10235 {
10236 if_true_label = if_false_label;
10237 if_false_label = 0;
10238 code = reverse_condition (code);
10239 }
10241 /* If one operand is constant, make it the second one. Only do this
10242 if the other operand is not constant as well. */
10244 if (swap_commutative_operands_p (op0, op1))
10245 {
10246 tem = op0;
10247 op0 = op1;
10248 op1 = tem;
10249 code = swap_condition (code);
10250 }
10252 if (flag_force_mem)
10253 {
10254 op0 = force_not_mem (op0);
10255 op1 = force_not_mem (op1);
10256 }
10258 do_pending_stack_adjust ();
10260 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10261 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10262 {
10263 if (tem == const_true_rtx)
10264 {
10265 if (if_true_label)
10266 emit_jump (if_true_label);
10267 }
10268 else
10269 {
10270 if (if_false_label)
10271 emit_jump (if_false_label);
10272 }
10273 return;
10274 }
10276 #if 0
10277 /* There's no need to do this now that combine.c can eliminate lots of
10278 sign extensions. This can be less efficient in certain cases on other
10279 machines.  */
10281 /* If this is a signed equality comparison, we can do it as an
10282 unsigned comparison since zero-extension is cheaper than sign
10283 extension and comparisons with zero are done as unsigned. This is
10284 the case even on machines that can do fast sign extension, since
10285 zero-extension is easier to combine with other operations than
10286 sign-extension is. If we are comparing against a constant, we must
10287 convert it to what it would look like unsigned. */
10288 if ((code == EQ || code == NE) && ! unsignedp
10289 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10290 {
10291 if (GET_CODE (op1) == CONST_INT
10292 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10293 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10294 unsignedp = 1;
10295 }
10296 #endif
10298 if (! if_true_label)
10299 {
10300 dummy_true_label = 1;
10301 if_true_label = gen_label_rtx ();
10302 }
10304 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10305 if_true_label);
10307 if (if_false_label)
10308 emit_jump (if_false_label);
10309 if (dummy_true_label)
10310 emit_label (if_true_label);
10311 }
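/* Editor's illustrative sketch (assumed example, not part of the
   original source): the label reversal at the top of the function.
   With only a false-label, "jump unless a < b" is emitted as the
   reversed test "jump if a >= b".  That is valid for integers but not
   for IEEE floats, where a NaN makes GE not the inverse of LT; hence
   the ! FLOAT_MODE_P check above.  Hypothetical names.  */
#if 0
static long
pick_arm_lt (long a, long b, long t, long f)
{
  if (a >= b)			/* reversed condition: one branch */
    goto on_false;
  return t;			/* true arm falls through */
 on_false:
  return f;			/* corresponds to if_false_label */
}
#endif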
10313 /* Generate code for a comparison expression EXP (including code to compute
10314 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10315 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10316 generated code will drop through.
10317 SIGNED_CODE should be the rtx operation for this comparison for
10318 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10320 We force a stack adjustment unless there are currently
10321 things pushed on the stack that aren't yet used. */
10324 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10325 if_true_label)
10326 tree exp;
10327 enum rtx_code signed_code, unsigned_code;
10328 rtx if_false_label, if_true_label;
10329 {
10330 unsigned int align0, align1;
10331 register rtx op0, op1;
10332 register tree type;
10333 register enum machine_mode mode;
10334 int unsignedp;
10335 enum rtx_code code;
10337 /* Don't crash if the comparison was erroneous. */
10338 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10339 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10340 return;
10342 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10343 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10344 return;
10346 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10347 mode = TYPE_MODE (type);
10348 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10349 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10350 || (GET_MODE_BITSIZE (mode)
10351 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10352 1)))))))
10353 {
10354 /* op0 might have been replaced by promoted constant, in which
10355 case the type of second argument should be used. */
10356 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10357 mode = TYPE_MODE (type);
10358 }
10359 unsignedp = TREE_UNSIGNED (type);
10360 code = unsignedp ? unsigned_code : signed_code;
10362 #ifdef HAVE_canonicalize_funcptr_for_compare
10363 /* If function pointers need to be "canonicalized" before they can
10364 be reliably compared, then canonicalize them. */
10365 if (HAVE_canonicalize_funcptr_for_compare
10366 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10367 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10368 == FUNCTION_TYPE))
10369 {
10370 rtx new_op0 = gen_reg_rtx (mode);
10372 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10373 op0 = new_op0;
10374 }
10376 if (HAVE_canonicalize_funcptr_for_compare
10377 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10378 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10379 == FUNCTION_TYPE))
10380 {
10381 rtx new_op1 = gen_reg_rtx (mode);
10383 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10384 op1 = new_op1;
10385 }
10386 #endif
10388 /* Do any postincrements in the expression that was tested.  */
10389 emit_queue ();
10391 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10392 ((mode == BLKmode)
10393 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10394 MIN (align0, align1),
10395 if_false_label, if_true_label);
10396 }
10398 /* Generate code to calculate EXP using a store-flag instruction
10399 and return an rtx for the result. EXP is either a comparison
10400 or a TRUTH_NOT_EXPR whose operand is a comparison.
10402 If TARGET is nonzero, store the result there if convenient.
10404 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10405 cheap.
10407 Return zero if there is no suitable set-flag instruction
10408 available on this machine.
10410 Once expand_expr has been called on the arguments of the comparison,
10411 we are committed to doing the store flag, since it is not safe to
10412 re-evaluate the expression. We emit the store-flag insn by calling
10413 emit_store_flag, but only expand the arguments if we have a reason
10414 to believe that emit_store_flag will be successful. If we think that
10415 it will, but it isn't, we have to simulate the store-flag with a
10416 set/jump/set sequence. */
10419 do_store_flag (exp, target, mode, only_cheap)
10420 tree exp;
10421 rtx target;
10422 enum machine_mode mode;
10423 int only_cheap;
10424 {
10425 enum rtx_code code;
10426 tree arg0, arg1, type;
10427 tree tem;
10428 enum machine_mode operand_mode;
10429 int invert = 0;
10430 int unsignedp;
10431 rtx op0, op1;
10432 enum insn_code icode;
10433 rtx subtarget = target;
10434 rtx result, label;
10436 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10437 result at the end. We can't simply invert the test since it would
10438 have already been inverted if it were valid. This case occurs for
10439 some floating-point comparisons. */
10441 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10442 invert = 1, exp = TREE_OPERAND (exp, 0);
10444 arg0 = TREE_OPERAND (exp, 0);
10445 arg1 = TREE_OPERAND (exp, 1);
10447 /* Don't crash if the comparison was erroneous. */
10448 if (arg0 == error_mark_node || arg1 == error_mark_node)
10449 return 0;
10451 type = TREE_TYPE (arg0);
10452 operand_mode = TYPE_MODE (type);
10453 unsignedp = TREE_UNSIGNED (type);
10455 /* We won't bother with BLKmode store-flag operations because it would mean
10456 passing a lot of information to emit_store_flag. */
10457 if (operand_mode == BLKmode)
10458 return 0;
10460 /* We won't bother with store-flag operations involving function pointers
10461 when function pointers must be canonicalized before comparisons. */
10462 #ifdef HAVE_canonicalize_funcptr_for_compare
10463 if (HAVE_canonicalize_funcptr_for_compare
10464 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10465 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10467 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10468 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10469 == FUNCTION_TYPE))))
10470 return 0;
10471 #endif
10476 /* Get the rtx comparison code to use. We know that EXP is a comparison
10477 operation of some type. Some comparisons against 1 and -1 can be
10478 converted to comparisons with zero. Do so here so that the tests
10479 below will be aware that we have a comparison with zero. These
10480 tests will not catch constants in the first operand, but constants
10481 are rarely passed as the first operand. */
10483 switch (TREE_CODE (exp))
10485 case EQ_EXPR:
10486 code = EQ;
10487 break;
10488 case NE_EXPR:
10489 code = NE;
10490 break;
10491 case LT_EXPR:
10492 if (integer_onep (arg1))
10493 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10494 else
10495 code = unsignedp ? LTU : LT;
10496 break;
10497 case LE_EXPR:
10498 if (! unsignedp && integer_all_onesp (arg1))
10499 arg1 = integer_zero_node, code = LT;
10500 else
10501 code = unsignedp ? LEU : LE;
10502 break;
10503 case GT_EXPR:
10504 if (! unsignedp && integer_all_onesp (arg1))
10505 arg1 = integer_zero_node, code = GE;
10506 else
10507 code = unsignedp ? GTU : GT;
10508 break;
10509 case GE_EXPR:
10510 if (integer_onep (arg1))
10511 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10512 else
10513 code = unsignedp ? GEU : GE;
10514 break;
10516 case UNORDERED_EXPR:
10542 /* Put a constant second. */
10543 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10544 {
10545 tem = arg0; arg0 = arg1; arg1 = tem;
10546 code = swap_condition (code);
10547 }
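/* Editor's illustrative sketch (assumed example, not part of the
   original source): the constant rewrites in the switch above rest on
   these signed-integer identities, each turning a comparison against
   1 or -1 into a comparison against zero:
     x <  1   <=>  x <= 0
     x <= -1  <=>  x <  0
     x >  -1  <=>  x >= 0
     x >= 1   <=>  x >  0
   For the unsigned cases only the first and last apply.  */
#if 0
static void
check_rewrites (long x)
{
  if ((x < 1) != (x <= 0) || (x <= -1) != (x < 0)
      || (x > -1) != (x >= 0) || (x >= 1) != (x > 0))
    abort ();			/* never reached */
}
#endif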
10549 /* If this is an equality or inequality test of a single bit, we can
10550 do this by shifting the bit being tested to the low-order bit and
10551 masking the result with the constant 1. If the condition was EQ,
10552 we xor it with 1. This does not require an scc insn and is faster
10553 than an scc insn even if we have it. */
10555 if ((code == NE || code == EQ)
10556 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10557 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10558 {
10559 tree inner = TREE_OPERAND (arg0, 0);
10560 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10561 int ops_unsignedp;
10563 /* If INNER is a right shift of a constant and it plus BITNUM does
10564 not overflow, adjust BITNUM and INNER. */
10566 if (TREE_CODE (inner) == RSHIFT_EXPR
10567 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10568 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10569 && bitnum < TYPE_PRECISION (type)
10570 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10571 bitnum - TYPE_PRECISION (type)))
10572 {
10573 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10574 inner = TREE_OPERAND (inner, 0);
10575 }
10577 /* If we are going to be able to omit the AND below, we must do our
10578 operations as unsigned. If we must use the AND, we have a choice.
10579 Normally unsigned is faster, but for some machines signed is. */
10580 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10581 #ifdef LOAD_EXTEND_OP
10582 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10583 #else
10584 : 1
10585 #endif
10586 );
10588 if (! get_subtarget (subtarget)
10589 || GET_MODE (subtarget) != operand_mode
10590 || ! safe_from_p (subtarget, inner, 1))
10591 subtarget = 0;
10593 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10595 if (bitnum != 0)
10596 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10597 size_int (bitnum), subtarget, ops_unsignedp);
10599 if (GET_MODE (op0) != mode)
10600 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10602 if ((code == EQ && ! invert) || (code == NE && invert))
10603 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10604 ops_unsignedp, OPTAB_LIB_WIDEN);
10606 /* Put the AND last so it can combine with more things. */
10607 if (bitnum != TYPE_PRECISION (type) - 1)
10608 op0 = expand_and (op0, const1_rtx, subtarget);
10610 return op0;
10611 }
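/* Editor's illustrative sketch (assumed example, not part of the
   original source): the shift-and-mask path above, as plain C.
   Testing (x & (1 << bitnum)) != 0 this way needs no scc instruction;
   for EQ the result is flipped with an XOR.  Hypothetical names.  */
#if 0
static unsigned long
single_bit_flag (unsigned long x, int bitnum, int code_is_eq)
{
  unsigned long flag = (x >> bitnum) & 1;	/* shift, then mask */
  return code_is_eq ? flag ^ 1 : flag;		/* invert for EQ */
}
#endif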
10613 /* Now see if we are likely to be able to do this. Return if not. */
10614 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10615 return 0;
10617 icode = setcc_gen_code[(int) code];
10618 if (icode == CODE_FOR_nothing
10619 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10620 {
10621 /* We can only do this if it is one of the special cases that
10622 can be handled without an scc insn. */
10623 if ((code == LT && integer_zerop (arg1))
10624 || (! only_cheap && code == GE && integer_zerop (arg1)))
10625 ;
10626 else if (BRANCH_COST >= 0
10627 && ! only_cheap && (code == NE || code == EQ)
10628 && TREE_CODE (type) != REAL_TYPE
10629 && ((abs_optab->handlers[(int) operand_mode].insn_code
10630 != CODE_FOR_nothing)
10631 || (ffs_optab->handlers[(int) operand_mode].insn_code
10632 != CODE_FOR_nothing)))
10633 ;
10634 else
10635 return 0;
10636 }
10638 if (! get_subtarget (target)
10639 || GET_MODE (subtarget) != operand_mode
10640 || ! safe_from_p (subtarget, arg1, 1))
10641 subtarget = 0;
10643 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10644 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10646 if (target == 0)
10647 target = gen_reg_rtx (mode);
10649 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10650 because, if emit_store_flag does anything, it will succeed and
10651 OP0 and OP1 will not be used subsequently. */
10653 result = emit_store_flag (target, code,
10654 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10655 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10656 operand_mode, unsignedp, 1);
10658 if (result)
10659 {
10660 if (invert)
10661 result = expand_binop (mode, xor_optab, result, const1_rtx,
10662 result, 0, OPTAB_LIB_WIDEN);
10664 return result;
10665 }
10666 /* If this failed, we have to do this with set/compare/jump/set code. */
10667 if (GET_CODE (target) != REG
10668 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10669 target = gen_reg_rtx (GET_MODE (target));
10671 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10672 result = compare_from_rtx (op0, op1, code, unsignedp,
10673 operand_mode, NULL_RTX, 0);
10674 if (GET_CODE (result) == CONST_INT)
10675 return (((result == const0_rtx && ! invert)
10676 || (result != const0_rtx && invert))
10677 ? const0_rtx : const1_rtx);
10679 label = gen_label_rtx ();
10680 if (bcc_gen_fctn[(int) code] == 0)
10681 abort ();
10683 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10684 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10685 emit_label (label);
10687 return target;
10688 }
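/* Editor's illustrative sketch (assumed example, not part of the
   original source): the set/compare/jump/set fallback above, shown
   for LT.  The result register is preset to 1 and only rewritten
   with 0 on the path where the condition fails (the two constants
   are exchanged when INVERT is set).  Hypothetical names.  */
#if 0
static int
store_flag_by_branch (long op0, long op1)
{
  int target = 1;		/* emit_move_insn (target, const1_rtx) */
  if (op0 < op1)		/* (*bcc_gen_fctn[LT]) (label) */
    goto over;
  target = 0;			/* emit_move_insn (target, const0_rtx) */
 over:				/* emit_label (label) */
  return target;
}
#endif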
10690 /* Generate a tablejump instruction (used for switch statements). */
10692 #ifdef HAVE_tablejump
10694 /* INDEX is the value being switched on, with the lowest value
10695 in the table already subtracted.
10696 MODE is its expected mode (needed if INDEX is constant).
10697 RANGE is the length of the jump table.
10698 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10700 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10701 index value is out of range. */
10704 do_tablejump (index, mode, range, table_label, default_label)
10705 rtx index, range, table_label, default_label;
10706 enum machine_mode mode;
10707 {
10708 register rtx temp, vector;
10710 /* Do an unsigned comparison (in the proper mode) between the index
10711 expression and the value which represents the length of the range.
10712 Since we just finished subtracting the lower bound of the range
10713 from the index expression, this comparison allows us to simultaneously
10714 check that the original index expression value is both greater than
10715 or equal to the minimum value of the range and less than or equal to
10716 the maximum value of the range. */
10718 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10719 0, default_label);
10721 /* If index is in range, it must fit in Pmode.
10722 Convert to Pmode so we can index with it. */
10724 index = convert_to_mode (Pmode, index, 1);
10726 /* Don't let a MEM slip through, because then INDEX that comes
10727 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10728 and break_out_memory_refs will go to work on it and mess it up. */
10729 #ifdef PIC_CASE_VECTOR_ADDRESS
10730 if (flag_pic && GET_CODE (index) != REG)
10731 index = copy_to_mode_reg (Pmode, index);
10732 #endif
10734 /* If flag_force_addr were to affect this address
10735 it could interfere with the tricky assumptions made
10736 about addresses that contain label-refs,
10737 which may be valid only very near the tablejump itself. */
10738 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10739 GET_MODE_SIZE, because this indicates how large insns are. The other
10740 uses should all be Pmode, because they are addresses. This code
10741 could fail if addresses and insns are not the same size. */
10742 index = gen_rtx_PLUS (Pmode,
10743 gen_rtx_MULT (Pmode, index,
10744 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10745 gen_rtx_LABEL_REF (Pmode, table_label));
10746 #ifdef PIC_CASE_VECTOR_ADDRESS
10747 if (flag_pic)
10748 index = PIC_CASE_VECTOR_ADDRESS (index);
10749 else
10750 #endif
10751 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10752 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10753 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10754 RTX_UNCHANGING_P (vector) = 1;
10755 convert_move (temp, vector, 0);
10757 emit_jump_insn (gen_tablejump (temp, table_label));
10759 /* If we are generating PIC code or if the table is PC-relative, the
10760 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10761 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10762 emit_barrier ();
10764 }
10765 #endif /* HAVE_tablejump */
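/* Editor's illustrative sketch (assumed example, not part of the
   original source): the dispatch do_tablejump arranges, as plain C.
   Because the lower bound was already subtracted from INDEX, one
   unsigned comparison rejects indices below the minimum (they wrap
   around to huge unsigned values) as well as indices above the
   maximum.  Hypothetical names.  */
#if 0
static void
tablejump_sketch (long orig_index, long low, long high,
		  void (*const table[]) (void), void (*dflt) (void))
{
  unsigned long index = (unsigned long) (orig_index - low);

  if (index > (unsigned long) (high - low))	/* single GTU test */
    dflt ();					/* default_label */
  else
    table[index] ();				/* jump through the table */
}
#endif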