1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
53 /* Supply a default definition for PUSH_ARGS. */
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
80 #define STACK_PUSH_CODE PRE_INC
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
102 /* Don't check memory usage, since code is being emitted to check a memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
110 /* This structure is used by move_by_pieces to describe the move to
112 struct move_by_pieces
123 int explicit_inc_from;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
157 struct clear_by_pieces *));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, unsigned int, int));
163 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
165 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
166 HOST_WIDE_INT, enum machine_mode,
167 tree, enum machine_mode, int,
168 unsigned int, HOST_WIDE_INT, int));
169 static enum memory_use_mode
170 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
171 static tree save_noncopied_parts PARAMS ((tree, tree));
172 static tree init_noncopied_parts PARAMS ((tree, tree));
173 static int safe_from_p PARAMS ((rtx, tree, int));
174 static int fixed_type_p PARAMS ((tree));
175 static rtx var_rtx PARAMS ((tree));
176 static int readonly_fields_p PARAMS ((tree));
177 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
178 static rtx expand_increment PARAMS ((tree, int, int));
179 static void preexpand_calls PARAMS ((tree));
180 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
181 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
182 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
184 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
186 /* Record for each mode whether we can move a register directly to or
187 from an object of that mode in memory. If we can't, we won't try
188 to use that mode directly when accessing a field of that mode. */
190 static char direct_load[NUM_MACHINE_MODES];
191 static char direct_store[NUM_MACHINE_MODES];
193 /* If a memory-to-memory move would take MOVE_RATIO or more simple
194 move-instruction sequences, we will do a movstr or libcall instead. */
197 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
200 /* If we are optimizing for space (-Os), cut down the default move ratio */
201 #define MOVE_RATIO (optimize_size ? 3 : 15)
205 /* This macro is used to determine whether move_by_pieces should be called
206 to perform a structure copy. */
207 #ifndef MOVE_BY_PIECES_P
208 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
212 /* This array records the insn_code of insns to perform block moves. */
213 enum insn_code movstr_optab[NUM_MACHINE_MODES];
215 /* This array records the insn_code of insns to perform block clears. */
216 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
218 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
231 enum machine_mode mode;
238 /* Since we are on the permanent obstack, we must be sure we save this
239 spot AFTER we call start_sequence, since it will reuse the rtl it
241 free_point = (char *) oballoc (0);
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
270 if (! HARD_REGNO_MODE_OK (regno, mode))
273 reg = gen_rtx_REG (mode, regno);
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
301 /* This is run at the start of compiling a function. */
306 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
313 apply_args_value = 0;
319 struct expr_status *p;
324 ggc_mark_rtx (p->x_saveregs_value);
325 ggc_mark_rtx (p->x_apply_args_value);
326 ggc_mark_rtx (p->x_forced_labels);
337 /* Small sanity check that the queue is empty at the end of a function. */
339 finish_expr_for_function ()
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
356 enqueue_insn (var, body)
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
380 protect_from_queue (x, modify)
384 register RTX_CODE code = GET_CODE (x);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
405 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
406 MEM_COPY_ATTRIBUTES (new, x);
407 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
429 else if (code == PLUS || code == MULT)
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
454 return QUEUED_COPY (x);
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
466 register enum rtx_code code = GET_CODE (x);
472 return queued_subexp_p (XEXP (x, 0));
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
483 /* Perform all the pending incrementations. */
489 while ((p = pending_chain))
491 rtx body = QUEUED_BODY (p);
493 if (GET_CODE (body) == SEQUENCE)
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
527 if (to_real != from_real)
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
546 emit_move_insn (to, from);
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
560 emit_unop_insn (code, to, from, UNKNOWN);
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
716 libcall = extendsfdf2_libfunc;
720 libcall = extendsfxf2_libfunc;
724 libcall = extendsftf2_libfunc;
736 libcall = truncdfsf2_libfunc;
740 libcall = extenddfxf2_libfunc;
744 libcall = extenddftf2_libfunc;
756 libcall = truncxfsf2_libfunc;
760 libcall = truncxfdf2_libfunc;
772 libcall = trunctfsf2_libfunc;
776 libcall = trunctfdf2_libfunc;
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
794 emit_move_insn (to, value);
798 /* Now both modes are integers. */
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
838 /* No special multiword conversion insn; do it by hand. */
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
851 lowpart_mode = from_mode;
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
858 /* Compute the value to put in each remaining word. */
860 fill_value = const0_rtx;
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
897 insns = get_insns ();
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
920 /* Handle pointer conversion */ /* SPEE 900220 */
921 if (to_mode == PQImode)
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
932 #endif /* HAVE_truncqipqi2 */
936 if (from_mode == PQImode)
938 if (to_mode != QImode)
940 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
951 #endif /* HAVE_extendpqiqi2 */
956 if (to_mode == PSImode)
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
967 #endif /* HAVE_truncsipsi2 */
971 if (from_mode == PSImode)
973 if (to_mode != SImode)
975 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
986 #endif /* HAVE_extendpsisi2 */
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002 #endif /* HAVE_truncdipdi2 */
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021 #endif /* HAVE_extendpdidi2 */
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1055 emit_unop_insn (code, to, from, equiv_code);
1060 enum machine_mode intermediate;
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1090 emit_move_insn (to, tmp);
1095 /* Support special truncate insns for certain modes. */
1097 if (from_mode == DImode && to_mode == SImode)
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 if (from_mode == DImode && to_mode == HImode)
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 if (from_mode == DImode && to_mode == QImode)
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 if (from_mode == SImode && to_mode == HImode)
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 if (from_mode == SImode && to_mode == QImode)
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 if (from_mode == HImode && to_mode == QImode)
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 if (from_mode == TImode && to_mode == DImode)
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 if (from_mode == TImode && to_mode == SImode)
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 if (from_mode == TImode && to_mode == HImode)
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 if (from_mode == TImode && to_mode == QImode)
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1237 /* Mode combination is not recognized. */
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1292 if (mode == oldmode)
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1305 HOST_WIDE_INT val = INTVAL (x);
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1310 int width = GET_MODE_BITSIZE (oldmode);
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1352 return GEN_INT (val);
1355 return gen_lowpart (mode, x);
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
1382 move_by_pieces (to, from, len, align)
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1448 data.explicit_inc_to = -1;
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1452 data.to_addr = copy_addr_to_reg (to_addr);
1454 data.explicit_inc_to = 1;
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1467 while (max_size > 1)
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1474 if (mode == VOIDmode)
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1481 max_size = GET_MODE_SIZE (mode);
1484 /* The code above should have handled everything. */
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bytes) is maximum alignment we can assume. */
1493 move_by_pieces_ninsns (l, align)
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1504 while (max_size > 1)
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1514 if (mode == VOIDmode)
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1521 max_size = GET_MODE_SIZE (mode);
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1540 while (data->len >= size)
1542 if (data->reverse) data->offset -= size;
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1572 if (! data->reverse) data->offset += size;
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1587 Return the address of the new block, if memcpy is called and returns it,
1591 emit_block_move (x, y, size, align)
1597 #ifdef TARGET_MEM_FUNCTIONS
1599 tree call_expr, arg_list;
1602 if (GET_MODE (x) != BLKmode)
1605 if (GET_MODE (y) != BLKmode)
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1612 if (GET_CODE (x) != MEM)
1614 if (GET_CODE (y) != MEM)
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1630 /* Since this is a move insn, we don't care about volatility. */
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1639 if (code != CODE_FOR_nothing
1640 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1656 rtx last = get_last_insn ();
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1672 delete_insns_since (last);
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1742 /* We need to make an argument list for the function call.
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
1779 move_block_to_reg (regno, x, nregs, mode)
1783 enum machine_mode mode;
1786 #ifdef HAVE_load_multiple
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1810 delete_insns_since (last);
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1825 move_block_from_reg (regno, x, nregs, size)
1832 #ifdef HAVE_store_multiple
1836 enum machine_mode mode;
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1880 delete_insns_since (last);
1884 for (i = 0; i < nregs; i++)
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1907 emit_group_load (dst, orig_src, ssize, align)
1915 if (GET_CODE (dst) != PARALLEL)
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1925 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1930 if (GET_CODE (src) != MEM)
1932 if (GET_CODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1947 /* Handle trailing fragments that run over the size of the struct. */
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1968 else if (GET_CODE (src) == CONCAT)
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1984 if (BYTES_BIG_ENDIAN && shift)
1985 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2001 emit_group_store (orig_dst, src, ssize, align)
2009 if (GET_CODE (src) != PARALLEL)
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2019 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2033 if (GET_CODE (dst) == PARALLEL)
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2052 else if (GET_CODE (dst) != MEM)
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2058 else if (! MEM_IN_STRUCT_P (dst))
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2077 if (BYTES_BIG_ENDIAN)
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2083 bytelen = ssize - bytepos;
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2148 /* Copy the structure BITSIZE bites at a time.
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2164 /* We need a new destination operand each time bitpos is on
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2169 /* Use xbitpos for the source extraction (right justified) and
2170 xbitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2187 use_reg (call_fusage, reg)
2188 rtx *call_fusage, reg;
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2203 use_regs (call_fusage, regno, nregs)
2210 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2213 for (i = 0; i < nregs; i++)
2214 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2217 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2218 PARALLEL REGS. This is for calls that pass values in multiple
2219 non-contiguous locations. The Irix 6 ABI has examples of this. */
2222 use_group_regs (call_fusage, regs)
2228 for (i = 0; i < XVECLEN (regs, 0); i++)
2230 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2232 /* A NULL entry means the parameter goes both on the stack and in
2233 registers. This can also be a MEM for targets that pass values
2234 partially on the stack and partially in registers. */
2235 if (reg != 0 && GET_CODE (reg) == REG)
2236 use_reg (call_fusage, reg);
2240 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2241 rtx with BLKmode). The caller must pass TO through protect_from_queue
2242 before calling. ALIGN is maximum alignment we can assume. */
2245 clear_by_pieces (to, len, align)
2250 struct clear_by_pieces data;
2251 rtx to_addr = XEXP (to, 0);
2252 unsigned int max_size = MOVE_MAX_PIECES + 1;
2253 enum machine_mode mode = VOIDmode, tmode;
2254 enum insn_code icode;
2257 data.to_addr = to_addr;
2260 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2261 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2263 data.explicit_inc_to = 0;
2265 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2266 if (data.reverse) data.offset = len;
2269 data.to_struct = MEM_IN_STRUCT_P (to);
2271 /* If copying requires more than two move insns,
2272 copy addresses to registers (to make displacements shorter)
2273 and use post-increment if available. */
2275 && move_by_pieces_ninsns (len, align) > 2)
2277 /* Determine the main mode we'll be using */
2278 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2279 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2280 if (GET_MODE_SIZE (tmode) < max_size)
2283 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2285 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2287 data.explicit_inc_to = -1;
2289 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2291 data.to_addr = copy_addr_to_reg (to_addr);
2293 data.explicit_inc_to = 1;
2295 if (!data.autinc_to && CONSTANT_P (to_addr))
2296 data.to_addr = copy_addr_to_reg (to_addr);
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2303 /* First move what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2306 while (max_size > 1)
2308 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2309 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2310 if (GET_MODE_SIZE (tmode) < max_size)
2313 if (mode == VOIDmode)
2316 icode = mov_optab->handlers[(int) mode].insn_code;
2317 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2318 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2320 max_size = GET_MODE_SIZE (mode);
2323 /* The code above should have handled everything. */
2328 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2329 with move instructions for mode MODE. GENFUN is the gen_... function
2330 to make a move insn for that mode. DATA has all the other info. */
2333 clear_by_pieces_1 (genfun, mode, data)
2334 rtx (*genfun) PARAMS ((rtx, ...));
2335 enum machine_mode mode;
2336 struct clear_by_pieces *data;
2338 register int size = GET_MODE_SIZE (mode);
2341 while (data->len >= size)
2343 if (data->reverse) data->offset -= size;
2345 to1 = (data->autinc_to
2346 ? gen_rtx_MEM (mode, data->to_addr)
2347 : copy_rtx (change_address (data->to, mode,
2348 plus_constant (data->to_addr,
2350 MEM_IN_STRUCT_P (to1) = data->to_struct;
2352 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2353 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2355 emit_insn ((*genfun) (to1, const0_rtx));
2356 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2357 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2359 if (! data->reverse) data->offset += size;
2365 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2366 its length in bytes and ALIGN is the maximum alignment we can is has.
2368 If we call a function that returns the length of the block, return it. */
2371 clear_storage (object, size, align)
2376 #ifdef TARGET_MEM_FUNCTIONS
2378 tree call_expr, arg_list;
2382 if (GET_MODE (object) == BLKmode)
2384 object = protect_from_queue (object, 1);
2385 size = protect_from_queue (size, 0);
2387 if (GET_CODE (size) == CONST_INT
2388 && MOVE_BY_PIECES_P (INTVAL (size), align))
2389 clear_by_pieces (object, INTVAL (size), align);
2392 /* Try the most limited insn first, because there's no point
2393 including more than one in the machine description unless
2394 the more limited one has some advantage. */
2396 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2397 enum machine_mode mode;
2399 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2400 mode = GET_MODE_WIDER_MODE (mode))
2402 enum insn_code code = clrstr_optab[(int) mode];
2403 insn_operand_predicate_fn pred;
2405 if (code != CODE_FOR_nothing
2406 /* We don't need MODE to be narrower than
2407 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2408 the mode mask, as it is returned by the macro, it will
2409 definitely be less than the actual mode mask. */
2410 && ((GET_CODE (size) == CONST_INT
2411 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2412 <= (GET_MODE_MASK (mode) >> 1)))
2413 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2414 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2415 || (*pred) (object, BLKmode))
2416 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2417 || (*pred) (opalign, VOIDmode)))
2420 rtx last = get_last_insn ();
2423 op1 = convert_to_mode (mode, size, 1);
2424 pred = insn_data[(int) code].operand[1].predicate;
2425 if (pred != 0 && ! (*pred) (op1, mode))
2426 op1 = copy_to_mode_reg (mode, op1);
2428 pat = GEN_FCN ((int) code) (object, op1, opalign);
2435 delete_insns_since (last);
2439 /* OBJECT or SIZE may have been passed through protect_from_queue.
2441 It is unsafe to save the value generated by protect_from_queue
2442 and reuse it later. Consider what happens if emit_queue is
2443 called before the return value from protect_from_queue is used.
2445 Expansion of the CALL_EXPR below will call emit_queue before
2446 we are finished emitting RTL for argument setup. So if we are
2447 not careful we could get the wrong value for an argument.
2449 To avoid this problem we go ahead and emit code to copy OBJECT
2450 and SIZE into new pseudos. We can then place those new pseudos
2451 into an RTL_EXPR and use them later, even after a call to
2454 Note this is not strictly needed for library calls since they
2455 do not call emit_queue before loading their arguments. However,
2456 we may need to have library calls call emit_queue in the future
2457 since failing to do so could cause problems for targets which
2458 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2459 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2464 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2465 TREE_UNSIGNED (integer_type_node));
2466 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2470 #ifdef TARGET_MEM_FUNCTIONS
2471 /* It is incorrect to use the libcall calling conventions to call
2472 memset in this context.
2474 This could be a user call to memset and the user may wish to
2475 examine the return value from memset.
2477 For targets where libcalls and normal calls have different
2478 conventions for returning pointers, we could end up generating
2481 So instead of using a libcall sequence we build up a suitable
2482 CALL_EXPR and expand the call in the normal fashion. */
2483 if (fn == NULL_TREE)
2487 /* This was copied from except.c, I don't know if all this is
2488 necessary in this context or not. */
2489 fn = get_identifier ("memset");
2490 push_obstacks_nochange ();
2491 end_temporary_allocation ();
2492 fntype = build_pointer_type (void_type_node);
2493 fntype = build_function_type (fntype, NULL_TREE);
2494 fn = build_decl (FUNCTION_DECL, fn, fntype);
2495 ggc_add_tree_root (&fn, 1);
2496 DECL_EXTERNAL (fn) = 1;
2497 TREE_PUBLIC (fn) = 1;
2498 DECL_ARTIFICIAL (fn) = 1;
2499 make_decl_rtl (fn, NULL_PTR, 1);
2500 assemble_external (fn);
2504 /* We need to make an argument list for the function call.
2506 memset has three arguments, the first is a void * addresses, the
2507 second a integer with the initialization value, the last is a
2508 size_t byte count for the copy. */
2510 = build_tree_list (NULL_TREE,
2511 make_tree (build_pointer_type (void_type_node),
2513 TREE_CHAIN (arg_list)
2514 = build_tree_list (NULL_TREE,
2515 make_tree (integer_type_node, const0_rtx));
2516 TREE_CHAIN (TREE_CHAIN (arg_list))
2517 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2518 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2520 /* Now we have to build up the CALL_EXPR itself. */
2521 call_expr = build1 (ADDR_EXPR,
2522 build_pointer_type (TREE_TYPE (fn)), fn);
2523 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2524 call_expr, arg_list, NULL_TREE);
2525 TREE_SIDE_EFFECTS (call_expr) = 1;
2527 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2529 emit_library_call (bzero_libfunc, 0,
2530 VOIDmode, 2, object, Pmode, size,
2531 TYPE_MODE (integer_type_node));
2536 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2541 /* Generate code to copy Y into X.
2542 Both Y and X must have the same mode, except that
2543 Y can be a constant with VOIDmode.
2544 This mode cannot be BLKmode; use emit_block_move for that.
2546 Return the last instruction emitted. */
2549 emit_move_insn (x, y)
2552 enum machine_mode mode = GET_MODE (x);
2554 x = protect_from_queue (x, 1);
2555 y = protect_from_queue (y, 0);
2557 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2560 /* Never force constant_p_rtx to memory. */
2561 if (GET_CODE (y) == CONSTANT_P_RTX)
2563 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2564 y = force_const_mem (mode, y);
2566 /* If X or Y are memory references, verify that their addresses are valid
2568 if (GET_CODE (x) == MEM
2569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2570 && ! push_operand (x, GET_MODE (x)))
2572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2573 x = change_address (x, VOIDmode, XEXP (x, 0));
2575 if (GET_CODE (y) == MEM
2576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2579 y = change_address (y, VOIDmode, XEXP (y, 0));
2581 if (mode == BLKmode)
2584 return emit_move_insn_1 (x, y);
2587 /* Low level part of emit_move_insn.
2588 Called just like emit_move_insn, but assumes X and Y
2589 are basically valid. */
2592 emit_move_insn_1 (x, y)
2595 enum machine_mode mode = GET_MODE (x);
2596 enum machine_mode submode;
2597 enum mode_class class = GET_MODE_CLASS (mode);
2600 if (mode >= MAX_MACHINE_MODE)
2603 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2607 /* Expand complex moves by moving real part and imag part, if possible. */
2608 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2609 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2611 (class == MODE_COMPLEX_INT
2612 ? MODE_INT : MODE_FLOAT),
2614 && (mov_optab->handlers[(int) submode].insn_code
2615 != CODE_FOR_nothing))
2617 /* Don't split destination if it is a stack push. */
2618 int stack = push_operand (x, GET_MODE (x));
2620 /* If this is a stack, push the highpart first, so it
2621 will be in the argument order.
2623 In that case, change_address is used only to convert
2624 the mode, not to change the address. */
2627 /* Note that the real part always precedes the imag part in memory
2628 regardless of machine's endianness. */
2629 #ifdef STACK_GROWS_DOWNWARD
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_imagpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_realpart (submode, y)));
2637 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2638 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2639 gen_realpart (submode, y)));
2640 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2641 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2642 gen_imagpart (submode, y)));
2647 rtx realpart_x, realpart_y;
2648 rtx imagpart_x, imagpart_y;
2650 /* If this is a complex value with each part being smaller than a
2651 word, the usual calling sequence will likely pack the pieces into
2652 a single register. Unfortunately, SUBREG of hard registers only
2653 deals in terms of words, so we have a problem converting input
2654 arguments to the CONCAT of two registers that is used elsewhere
2655 for complex values. If this is before reload, we can copy it into
2656 memory and reload. FIXME, we should see about using extract and
2657 insert on integer registers, but complex short and complex char
2658 variables should be rarely used. */
2659 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2660 && (reload_in_progress | reload_completed) == 0)
2662 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2663 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2665 if (packed_dest_p || packed_src_p)
2667 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2668 ? MODE_FLOAT : MODE_INT);
2670 enum machine_mode reg_mode =
2671 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2673 if (reg_mode != BLKmode)
2675 rtx mem = assign_stack_temp (reg_mode,
2676 GET_MODE_SIZE (mode), 0);
2678 rtx cmem = change_address (mem, mode, NULL_RTX);
2680 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2684 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2685 emit_move_insn_1 (cmem, y);
2686 return emit_move_insn_1 (sreg, mem);
2690 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2691 emit_move_insn_1 (mem, sreg);
2692 return emit_move_insn_1 (x, cmem);
2698 realpart_x = gen_realpart (submode, x);
2699 realpart_y = gen_realpart (submode, y);
2700 imagpart_x = gen_imagpart (submode, x);
2701 imagpart_y = gen_imagpart (submode, y);
2703 /* Show the output dies here. This is necessary for SUBREGs
2704 of pseudos since we cannot track their lifetimes correctly;
2705 hard regs shouldn't appear here except as return values.
2706 We never want to emit such a clobber after reload. */
2708 && ! (reload_in_progress || reload_completed)
2709 && (GET_CODE (realpart_x) == SUBREG
2710 || GET_CODE (imagpart_x) == SUBREG))
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2715 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2716 (realpart_x, realpart_y));
2717 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2718 (imagpart_x, imagpart_y));
2721 return get_last_insn ();
2724 /* This will handle any multi-word mode that lacks a move_insn pattern.
2725 However, you will get better code if you define such patterns,
2726 even if they must turn into multiple assembler instructions. */
2727 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2733 #ifdef PUSH_ROUNDING
2735 /* If X is a push on the stack, do the push now and replace
2736 X with a reference to the stack pointer. */
2737 if (push_operand (x, GET_MODE (x)))
2739 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2740 x = change_address (x, VOIDmode, stack_pointer_rtx);
2744 /* If we are in reload, see if either operand is a MEM whose address
2745 is scheduled for replacement. */
2746 if (reload_in_progress && GET_CODE (x) == MEM
2747 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2749 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2751 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
2752 MEM_COPY_ATTRIBUTES (new, x);
2753 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
2756 if (reload_in_progress && GET_CODE (y) == MEM
2757 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2759 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2761 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (y);
2762 MEM_COPY_ATTRIBUTES (new, y);
2763 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (y);
2771 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2774 rtx xpart = operand_subword (x, i, 1, mode);
2775 rtx ypart = operand_subword (y, i, 1, mode);
2777 /* If we can't get a part of Y, put Y into memory if it is a
2778 constant. Otherwise, force it into a register. If we still
2779 can't get a part of Y, abort. */
2780 if (ypart == 0 && CONSTANT_P (y))
2782 y = force_const_mem (mode, y);
2783 ypart = operand_subword (y, i, 1, mode);
2785 else if (ypart == 0)
2786 ypart = operand_subword_force (y, i, mode);
2788 if (xpart == 0 || ypart == 0)
2791 need_clobber |= (GET_CODE (xpart) == SUBREG);
2793 last_insn = emit_move_insn (xpart, ypart);
2796 seq = gen_sequence ();
2799 /* Show the output dies here. This is necessary for SUBREGs
2800 of pseudos since we cannot track their lifetimes correctly;
2801 hard regs shouldn't appear here except as return values.
2802 We never want to emit such a clobber after reload. */
2804 && ! (reload_in_progress || reload_completed)
2805 && need_clobber != 0)
2807 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2818 /* Pushing data onto the stack. */
2820 /* Push a block of length SIZE (perhaps variable)
2821 and return an rtx to address the beginning of the block.
2822 Note that it is not possible for the value returned to be a QUEUED.
2823 The value may be virtual_outgoing_args_rtx.
2825 EXTRA is the number of bytes of padding to push in addition to SIZE.
2826 BELOW nonzero means this padding comes at low addresses;
2827 otherwise, the padding comes at high addresses. */
2830 push_block (size, extra, below)
2836 size = convert_modes (Pmode, ptr_mode, size, 1);
2837 if (CONSTANT_P (size))
2838 anti_adjust_stack (plus_constant (size, extra));
2839 else if (GET_CODE (size) == REG && extra == 0)
2840 anti_adjust_stack (size);
2843 rtx temp = copy_to_mode_reg (Pmode, size);
2845 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2846 temp, 0, OPTAB_LIB_WIDEN);
2847 anti_adjust_stack (temp);
2850 #ifndef STACK_GROWS_DOWNWARD
2851 #ifdef ARGS_GROW_DOWNWARD
2852 if (!ACCUMULATE_OUTGOING_ARGS)
2861 /* Return the lowest stack address when STACK or ARGS grow downward and
2862 we are not aaccumulating outgoing arguments (the c4x port uses such
2864 temp = virtual_outgoing_args_rtx;
2865 if (extra != 0 && below)
2866 temp = plus_constant (temp, extra);
2870 if (GET_CODE (size) == CONST_INT)
2871 temp = plus_constant (virtual_outgoing_args_rtx,
2872 - INTVAL (size) - (below ? 0 : extra));
2873 else if (extra != 0 && !below)
2874 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2875 negate_rtx (Pmode, plus_constant (size, extra)));
2877 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2878 negate_rtx (Pmode, size));
2881 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2887 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2890 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2891 block of SIZE bytes. */
2894 get_push_address (size)
2899 if (STACK_PUSH_CODE == POST_DEC)
2900 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2901 else if (STACK_PUSH_CODE == POST_INC)
2902 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2904 temp = stack_pointer_rtx;
2906 return copy_to_reg (temp);
2909 /* Generate code to push X onto the stack, assuming it has mode MODE and
2911 MODE is redundant except when X is a CONST_INT (since they don't
2913 SIZE is an rtx for the size of data to be copied (in bytes),
2914 needed only if X is BLKmode.
2916 ALIGN is maximum alignment we can assume.
2918 If PARTIAL and REG are both nonzero, then copy that many of the first
2919 words of X into registers starting with REG, and push the rest of X.
2920 The amount of space pushed is decreased by PARTIAL words,
2921 rounded *down* to a multiple of PARM_BOUNDARY.
2922 REG must be a hard register in this case.
2923 If REG is zero but PARTIAL is not, take any all others actions for an
2924 argument partially in registers, but do not actually load any
2927 EXTRA is the amount in bytes of extra space to leave next to this arg.
2928 This is ignored if an argument block has already been allocated.
2930 On a machine that lacks real push insns, ARGS_ADDR is the address of
2931 the bottom of the argument block for this call. We use indexing off there
2932 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2933 argument block has not been preallocated.
2935 ARGS_SO_FAR is the size of args previously pushed for this call.
2937 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2938 for arguments passed in registers. If nonzero, it will be the number
2939 of bytes required. */
2942 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2943 args_addr, args_so_far, reg_parm_stack_space,
2946 enum machine_mode mode;
2955 int reg_parm_stack_space;
2959 enum direction stack_direction
2960 #ifdef STACK_GROWS_DOWNWARD
2966 /* Decide where to pad the argument: `downward' for below,
2967 `upward' for above, or `none' for don't pad it.
2968 Default is below for small data on big-endian machines; else above. */
2969 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2971 /* Invert direction if stack is post-update. */
2972 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2973 if (where_pad != none)
2974 where_pad = (where_pad == downward ? upward : downward);
2976 xinner = x = protect_from_queue (x, 0);
2978 if (mode == BLKmode)
2980 /* Copy a block into the stack, entirely or partially. */
2983 int used = partial * UNITS_PER_WORD;
2984 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2992 /* USED is now the # of bytes we need not copy to the stack
2993 because registers will take care of them. */
2996 xinner = change_address (xinner, BLKmode,
2997 plus_constant (XEXP (xinner, 0), used));
2999 /* If the partial register-part of the arg counts in its stack size,
3000 skip the part of stack space corresponding to the registers.
3001 Otherwise, start copying to the beginning of the stack space,
3002 by setting SKIP to 0. */
3003 skip = (reg_parm_stack_space == 0) ? 0 : used;
3005 #ifdef PUSH_ROUNDING
3006 /* Do it with several push insns if that doesn't take lots of insns
3007 and if there is no difficulty with push insns that skip bytes
3008 on the stack for alignment purposes. */
3011 && GET_CODE (size) == CONST_INT
3013 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3014 /* Here we avoid the case of a structure whose weak alignment
3015 forces many pushes of a small amount of data,
3016 and such small pushes do rounding that causes trouble. */
3017 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3018 || align >= BIGGEST_ALIGNMENT
3019 || PUSH_ROUNDING (align) == align)
3020 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3022 /* Push padding now if padding above and stack grows down,
3023 or if padding below and stack grows up.
3024 But if space already allocated, this has already been done. */
3025 if (extra && args_addr == 0
3026 && where_pad != none && where_pad != stack_direction)
3027 anti_adjust_stack (GEN_INT (extra));
3029 stack_pointer_delta += INTVAL (size) - used;
3030 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3031 INTVAL (size) - used, align);
3033 if (current_function_check_memory_usage && ! in_check_memory_usage)
3037 in_check_memory_usage = 1;
3038 temp = get_push_address (INTVAL(size) - used);
3039 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3040 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3042 XEXP (xinner, 0), Pmode,
3043 GEN_INT (INTVAL(size) - used),
3044 TYPE_MODE (sizetype));
3046 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3048 GEN_INT (INTVAL(size) - used),
3049 TYPE_MODE (sizetype),
3050 GEN_INT (MEMORY_USE_RW),
3051 TYPE_MODE (integer_type_node));
3052 in_check_memory_usage = 0;
3056 #endif /* PUSH_ROUNDING */
3058 /* Otherwise make space on the stack and copy the data
3059 to the address of that space. */
3061 /* Deduct words put into registers from the size we must copy. */
3064 if (GET_CODE (size) == CONST_INT)
3065 size = GEN_INT (INTVAL (size) - used);
3067 size = expand_binop (GET_MODE (size), sub_optab, size,
3068 GEN_INT (used), NULL_RTX, 0,
3072 /* Get the address of the stack space.
3073 In this case, we do not deal with EXTRA separately.
3074 A single stack adjust will do. */
3077 temp = push_block (size, extra, where_pad == downward);
3080 else if (GET_CODE (args_so_far) == CONST_INT)
3081 temp = memory_address (BLKmode,
3082 plus_constant (args_addr,
3083 skip + INTVAL (args_so_far)));
3085 temp = memory_address (BLKmode,
3086 plus_constant (gen_rtx_PLUS (Pmode,
3090 if (current_function_check_memory_usage && ! in_check_memory_usage)
3094 in_check_memory_usage = 1;
3095 target = copy_to_reg (temp);
3096 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3097 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3099 XEXP (xinner, 0), Pmode,
3100 size, TYPE_MODE (sizetype));
3102 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3104 size, TYPE_MODE (sizetype),
3105 GEN_INT (MEMORY_USE_RW),
3106 TYPE_MODE (integer_type_node));
3107 in_check_memory_usage = 0;
3110 /* TEMP is the address of the block. Copy the data there. */
3111 if (GET_CODE (size) == CONST_INT
3112 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3114 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3115 INTVAL (size), align);
3120 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3121 enum machine_mode mode;
3122 rtx target = gen_rtx_MEM (BLKmode, temp);
3124 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3126 mode = GET_MODE_WIDER_MODE (mode))
3128 enum insn_code code = movstr_optab[(int) mode];
3129 insn_operand_predicate_fn pred;
3131 if (code != CODE_FOR_nothing
3132 && ((GET_CODE (size) == CONST_INT
3133 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3134 <= (GET_MODE_MASK (mode) >> 1)))
3135 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3136 && (!(pred = insn_data[(int) code].operand[0].predicate)
3137 || ((*pred) (target, BLKmode)))
3138 && (!(pred = insn_data[(int) code].operand[1].predicate)
3139 || ((*pred) (xinner, BLKmode)))
3140 && (!(pred = insn_data[(int) code].operand[3].predicate)
3141 || ((*pred) (opalign, VOIDmode))))
3143 rtx op2 = convert_to_mode (mode, size, 1);
3144 rtx last = get_last_insn ();
3147 pred = insn_data[(int) code].operand[2].predicate;
3148 if (pred != 0 && ! (*pred) (op2, mode))
3149 op2 = copy_to_mode_reg (mode, op2);
3151 pat = GEN_FCN ((int) code) (target, xinner,
3159 delete_insns_since (last);
3164 if (!ACCUMULATE_OUTGOING_ARGS)
3166 /* If the source is referenced relative to the stack pointer,
3167 copy it to another register to stabilize it. We do not need
3168 to do this if we know that we won't be changing sp. */
3170 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3171 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3172 temp = copy_to_reg (temp);
3175 /* Make inhibit_defer_pop nonzero around the library call
3176 to force it to pop the bcopy-arguments right away. */
3178 #ifdef TARGET_MEM_FUNCTIONS
3179 emit_library_call (memcpy_libfunc, 0,
3180 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3181 convert_to_mode (TYPE_MODE (sizetype),
3182 size, TREE_UNSIGNED (sizetype)),
3183 TYPE_MODE (sizetype));
3185 emit_library_call (bcopy_libfunc, 0,
3186 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3187 convert_to_mode (TYPE_MODE (integer_type_node),
3189 TREE_UNSIGNED (integer_type_node)),
3190 TYPE_MODE (integer_type_node));
3195 else if (partial > 0)
3197 /* Scalar partly in registers. */
3199 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3202 /* # words of start of argument
3203 that we must make space for but need not store. */
3204 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3205 int args_offset = INTVAL (args_so_far);
3208 /* Push padding now if padding above and stack grows down,
3209 or if padding below and stack grows up.
3210 But if space already allocated, this has already been done. */
3211 if (extra && args_addr == 0
3212 && where_pad != none && where_pad != stack_direction)
3213 anti_adjust_stack (GEN_INT (extra));
3215 /* If we make space by pushing it, we might as well push
3216 the real data. Otherwise, we can leave OFFSET nonzero
3217 and leave the space uninitialized. */
3221 /* Now NOT_STACK gets the number of words that we don't need to
3222 allocate on the stack. */
3223 not_stack = partial - offset;
3225 /* If the partial register-part of the arg counts in its stack size,
3226 skip the part of stack space corresponding to the registers.
3227 Otherwise, start copying to the beginning of the stack space,
3228 by setting SKIP to 0. */
3229 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3231 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3232 x = validize_mem (force_const_mem (mode, x));
3234 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3235 SUBREGs of such registers are not allowed. */
3236 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3237 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3238 x = copy_to_reg (x);
3240 /* Loop over all the words allocated on the stack for this arg. */
3241 /* We can do it by words, because any scalar bigger than a word
3242 has a size a multiple of a word. */
3243 #ifndef PUSH_ARGS_REVERSED
3244 for (i = not_stack; i < size; i++)
3246 for (i = size - 1; i >= not_stack; i--)
3248 if (i >= not_stack + offset)
3249 emit_push_insn (operand_subword_force (x, i, mode),
3250 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3252 GEN_INT (args_offset + ((i - not_stack + skip)
3254 reg_parm_stack_space, alignment_pad);
3259 rtx target = NULL_RTX;
3261 /* Push padding now if padding above and stack grows down,
3262 or if padding below and stack grows up.
3263 But if space already allocated, this has already been done. */
3264 if (extra && args_addr == 0
3265 && where_pad != none && where_pad != stack_direction)
3266 anti_adjust_stack (GEN_INT (extra));
3268 #ifdef PUSH_ROUNDING
3269 if (args_addr == 0 && PUSH_ARGS)
3271 addr = gen_push_operand ();
3272 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3277 if (GET_CODE (args_so_far) == CONST_INT)
3279 = memory_address (mode,
3280 plus_constant (args_addr,
3281 INTVAL (args_so_far)));
3283 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3288 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3290 if (current_function_check_memory_usage && ! in_check_memory_usage)
3292 in_check_memory_usage = 1;
3294 target = get_push_address (GET_MODE_SIZE (mode));
3296 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3297 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3300 GEN_INT (GET_MODE_SIZE (mode)),
3301 TYPE_MODE (sizetype));
3303 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3305 GEN_INT (GET_MODE_SIZE (mode)),
3306 TYPE_MODE (sizetype),
3307 GEN_INT (MEMORY_USE_RW),
3308 TYPE_MODE (integer_type_node));
3309 in_check_memory_usage = 0;
3314 /* If part should go in registers, copy that part
3315 into the appropriate registers. Do this now, at the end,
3316 since mem-to-mem copies above may do function calls. */
3317 if (partial > 0 && reg != 0)
3319 /* Handle calls that pass values in multiple non-contiguous locations.
3320 The Irix 6 ABI has examples of this. */
3321 if (GET_CODE (reg) == PARALLEL)
3322 emit_group_load (reg, x, -1, align); /* ??? size? */
3324 move_block_to_reg (REGNO (reg), x, partial, mode);
3327 if (extra && args_addr == 0 && where_pad == stack_direction)
3328 anti_adjust_stack (GEN_INT (extra));
3331 anti_adjust_stack (alignment_pad);
3334 /* Expand an assignment that stores the value of FROM into TO.
3335 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3336 (This may contain a QUEUED rtx;
3337 if the value is constant, this rtx is a constant.)
3338 Otherwise, the returned value is NULL_RTX.
3340 SUGGEST_REG is no longer actually used.
3341 It used to mean, copy the value through a register
3342 and return that register, if that is possible.
3343 We now use WANT_VALUE to decide whether to do this. */
3346 expand_assignment (to, from, want_value, suggest_reg)
3349 int suggest_reg ATTRIBUTE_UNUSED;
3351 register rtx to_rtx = 0;
3354 /* Don't crash if the lhs of the assignment was erroneous. */
3356 if (TREE_CODE (to) == ERROR_MARK)
3358 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3359 return want_value ? result : NULL_RTX;
3362 /* Assignment of a structure component needs special treatment
3363 if the structure component's rtx is not simply a MEM.
3364 Assignment of an array element at a constant index, and assignment of
3365 an array element in an unaligned packed structure field, has the same
3368 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3369 || TREE_CODE (to) == ARRAY_REF)
3371 enum machine_mode mode1;
3372 HOST_WIDE_INT bitsize, bitpos;
3377 unsigned int alignment;
3380 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3381 &unsignedp, &volatilep, &alignment);
3383 /* If we are going to use store_bit_field and extract_bit_field,
3384 make sure to_rtx will be safe for multiple use. */
3386 if (mode1 == VOIDmode && want_value)
3387 tem = stabilize_reference (tem);
3389 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3392 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3394 if (GET_CODE (to_rtx) != MEM)
3397 if (GET_MODE (offset_rtx) != ptr_mode)
3399 #ifdef POINTERS_EXTEND_UNSIGNED
3400 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3402 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3406 /* A constant address in TO_RTX can have VOIDmode, we must not try
3407 to call force_reg for that case. Avoid that case. */
3408 if (GET_CODE (to_rtx) == MEM
3409 && GET_MODE (to_rtx) == BLKmode
3410 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3412 && (bitpos % bitsize) == 0
3413 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3414 && alignment == GET_MODE_ALIGNMENT (mode1))
3416 rtx temp = change_address (to_rtx, mode1,
3417 plus_constant (XEXP (to_rtx, 0),
3420 if (GET_CODE (XEXP (temp, 0)) == REG)
3423 to_rtx = change_address (to_rtx, mode1,
3424 force_reg (GET_MODE (XEXP (temp, 0)),
3429 to_rtx = change_address (to_rtx, VOIDmode,
3430 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3431 force_reg (ptr_mode,
3437 if (GET_CODE (to_rtx) == MEM)
3439 /* When the offset is zero, to_rtx is the address of the
3440 structure we are storing into, and hence may be shared.
3441 We must make a new MEM before setting the volatile bit. */
3443 to_rtx = copy_rtx (to_rtx);
3445 MEM_VOLATILE_P (to_rtx) = 1;
3447 #if 0 /* This was turned off because, when a field is volatile
3448 in an object which is not volatile, the object may be in a register,
3449 and then we would abort over here. */
3455 if (TREE_CODE (to) == COMPONENT_REF
3456 && TREE_READONLY (TREE_OPERAND (to, 1)))
3459 to_rtx = copy_rtx (to_rtx);
3461 RTX_UNCHANGING_P (to_rtx) = 1;
3464 /* Check the access. */
3465 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3470 enum machine_mode best_mode;
3472 best_mode = get_best_mode (bitsize, bitpos,
3473 TYPE_ALIGN (TREE_TYPE (tem)),
3475 if (best_mode == VOIDmode)
3478 best_mode_size = GET_MODE_BITSIZE (best_mode);
3479 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3480 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3481 size *= GET_MODE_SIZE (best_mode);
3483 /* Check the access right of the pointer. */
3485 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3487 GEN_INT (size), TYPE_MODE (sizetype),
3488 GEN_INT (MEMORY_USE_WO),
3489 TYPE_MODE (integer_type_node));
3492 /* If this is a varying-length object, we must get the address of
3493 the source and do an explicit block move. */
3496 unsigned int from_align;
3497 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3499 = change_address (to_rtx, VOIDmode,
3500 plus_constant (XEXP (to_rtx, 0),
3501 bitpos / BITS_PER_UNIT));
3503 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3504 MIN (alignment, from_align));
3511 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3513 /* Spurious cast for HPUX compiler. */
3514 ? ((enum machine_mode)
3515 TYPE_MODE (TREE_TYPE (to)))
3519 int_size_in_bytes (TREE_TYPE (tem)),
3520 get_alias_set (to));
3522 preserve_temp_slots (result);
3526 /* If the value is meaningful, convert RESULT to the proper mode.
3527 Otherwise, return nothing. */
3528 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3529 TYPE_MODE (TREE_TYPE (from)),
3531 TREE_UNSIGNED (TREE_TYPE (to)))
3536 /* If the rhs is a function call and its value is not an aggregate,
3537 call the function before we start to compute the lhs.
3538 This is needed for correct code for cases such as
3539 val = setjmp (buf) on machines where reference to val
3540 requires loading up part of an address in a separate insn.
3542 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3543 since it might be a promoted variable where the zero- or sign- extension
3544 needs to be done. Handling this in the normal way is safe because no
3545 computation is done before the call. */
3546 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3547 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3548 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3549 && GET_CODE (DECL_RTL (to)) == REG))
3554 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3556 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3558 /* Handle calls that return values in multiple non-contiguous locations.
3559 The Irix 6 ABI has examples of this. */
3560 if (GET_CODE (to_rtx) == PARALLEL)
3561 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3562 TYPE_ALIGN (TREE_TYPE (from)));
3563 else if (GET_MODE (to_rtx) == BLKmode)
3564 emit_block_move (to_rtx, value, expr_size (from),
3565 TYPE_ALIGN (TREE_TYPE (from)));
3568 #ifdef POINTERS_EXTEND_UNSIGNED
3569 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3570 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3571 value = convert_memory_address (GET_MODE (to_rtx), value);
3573 emit_move_insn (to_rtx, value);
3575 preserve_temp_slots (to_rtx);
3578 return want_value ? to_rtx : NULL_RTX;
3581 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3582 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3586 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3587 if (GET_CODE (to_rtx) == MEM)
3588 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3591 /* Don't move directly into a return register. */
3592 if (TREE_CODE (to) == RESULT_DECL
3593 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3598 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3600 if (GET_CODE (to_rtx) == PARALLEL)
3601 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3602 TYPE_ALIGN (TREE_TYPE (from)));
3604 emit_move_insn (to_rtx, temp);
3606 preserve_temp_slots (to_rtx);
3609 return want_value ? to_rtx : NULL_RTX;
3612 /* In case we are returning the contents of an object which overlaps
3613 the place the value is being stored, use a safe function when copying
3614 a value through a pointer into a structure value return block. */
3615 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3616 && current_function_returns_struct
3617 && !current_function_returns_pcc_struct)
3622 size = expr_size (from);
3623 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3624 EXPAND_MEMORY_USE_DONT);
3626 /* Copy the rights of the bitmap. */
3627 if (current_function_check_memory_usage)
3628 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3629 XEXP (to_rtx, 0), Pmode,
3630 XEXP (from_rtx, 0), Pmode,
3631 convert_to_mode (TYPE_MODE (sizetype),
3632 size, TREE_UNSIGNED (sizetype)),
3633 TYPE_MODE (sizetype));
3635 #ifdef TARGET_MEM_FUNCTIONS
3636 emit_library_call (memcpy_libfunc, 0,
3637 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3638 XEXP (from_rtx, 0), Pmode,
3639 convert_to_mode (TYPE_MODE (sizetype),
3640 size, TREE_UNSIGNED (sizetype)),
3641 TYPE_MODE (sizetype));
3643 emit_library_call (bcopy_libfunc, 0,
3644 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3645 XEXP (to_rtx, 0), Pmode,
3646 convert_to_mode (TYPE_MODE (integer_type_node),
3647 size, TREE_UNSIGNED (integer_type_node)),
3648 TYPE_MODE (integer_type_node));
3651 preserve_temp_slots (to_rtx);
3654 return want_value ? to_rtx : NULL_RTX;
3657 /* Compute FROM and store the value in the rtx we got. */
3660 result = store_expr (from, to_rtx, want_value);
3661 preserve_temp_slots (result);
3664 return want_value ? result : NULL_RTX;
3667 /* Generate code for computing expression EXP,
3668 and storing the value into TARGET.
3669 TARGET may contain a QUEUED rtx.
3671 If WANT_VALUE is nonzero, return a copy of the value
3672 not in TARGET, so that we can be sure to use the proper
3673 value in a containing expression even if TARGET has something
3674 else stored in it. If possible, we copy the value through a pseudo
3675 and return that pseudo. Or, if the value is constant, we try to
3676 return the constant. In some cases, we return a pseudo
3677 copied *from* TARGET.
3679 If the mode is BLKmode then we may return TARGET itself.
3680 It turns out that in BLKmode it doesn't cause a problem.
3681 because C has no operators that could combine two different
3682 assignments into the same BLKmode object with different values
3683 with no sequence point. Will other languages need this to
3686 If WANT_VALUE is 0, we return NULL, to make sure
3687 to catch quickly any cases where the caller uses the value
3688 and fails to set WANT_VALUE. */
3691 store_expr (exp, target, want_value)
3693 register rtx target;
3697 int dont_return_target = 0;
3699 if (TREE_CODE (exp) == COMPOUND_EXPR)
3701 /* Perform first part of compound expression, then assign from second
3703 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3705 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3707 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3709 /* For conditional expression, get safe form of the target. Then
3710 test the condition, doing the appropriate assignment on either
3711 side. This avoids the creation of unnecessary temporaries.
3712 For non-BLKmode, it is more efficient not to do this. */
3714 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3717 target = protect_from_queue (target, 1);
3719 do_pending_stack_adjust ();
3721 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3722 start_cleanup_deferral ();
3723 store_expr (TREE_OPERAND (exp, 1), target, 0);
3724 end_cleanup_deferral ();
3726 emit_jump_insn (gen_jump (lab2));
3729 start_cleanup_deferral ();
3730 store_expr (TREE_OPERAND (exp, 2), target, 0);
3731 end_cleanup_deferral ();
3736 return want_value ? target : NULL_RTX;
3738 else if (queued_subexp_p (target))
3739 /* If target contains a postincrement, let's not risk
3740 using it as the place to generate the rhs. */
3742 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3744 /* Expand EXP into a new pseudo. */
3745 temp = gen_reg_rtx (GET_MODE (target));
3746 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3749 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3751 /* If target is volatile, ANSI requires accessing the value
3752 *from* the target, if it is accessed. So make that happen.
3753 In no case return the target itself. */
3754 if (! MEM_VOLATILE_P (target) && want_value)
3755 dont_return_target = 1;
3757 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3758 && GET_MODE (target) != BLKmode)
3759 /* If target is in memory and caller wants value in a register instead,
3760 arrange that. Pass TARGET as target for expand_expr so that,
3761 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3762 We know expand_expr will not use the target in that case.
3763 Don't do this if TARGET is volatile because we are supposed
3764 to write it and then read it. */
3766 temp = expand_expr (exp, target, GET_MODE (target), 0);
3767 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3768 temp = copy_to_reg (temp);
3769 dont_return_target = 1;
3771 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3772 /* If this is an scalar in a register that is stored in a wider mode
3773 than the declared mode, compute the result into its declared mode
3774 and then convert to the wider mode. Our value is the computed
3777 /* If we don't want a value, we can do the conversion inside EXP,
3778 which will often result in some optimizations. Do the conversion
3779 in two steps: first change the signedness, if needed, then
3780 the extend. But don't do this if the type of EXP is a subtype
3781 of something else since then the conversion might involve
3782 more than just converting modes. */
3783 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3784 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3786 if (TREE_UNSIGNED (TREE_TYPE (exp))
3787 != SUBREG_PROMOTED_UNSIGNED_P (target))
3790 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3794 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3795 SUBREG_PROMOTED_UNSIGNED_P (target)),
3799 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3801 /* If TEMP is a volatile MEM and we want a result value, make
3802 the access now so it gets done only once. Likewise if
3803 it contains TARGET. */
3804 if (GET_CODE (temp) == MEM && want_value
3805 && (MEM_VOLATILE_P (temp)
3806 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3807 temp = copy_to_reg (temp);
3809 /* If TEMP is a VOIDmode constant, use convert_modes to make
3810 sure that we properly convert it. */
3811 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3812 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3813 TYPE_MODE (TREE_TYPE (exp)), temp,
3814 SUBREG_PROMOTED_UNSIGNED_P (target));
3816 convert_move (SUBREG_REG (target), temp,
3817 SUBREG_PROMOTED_UNSIGNED_P (target));
3819 /* If we promoted a constant, change the mode back down to match
3820 target. Otherwise, the caller might get confused by a result whose
3821 mode is larger than expected. */
3823 if (want_value && GET_MODE (temp) != GET_MODE (target)
3824 && GET_MODE (temp) != VOIDmode)
3826 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3827 SUBREG_PROMOTED_VAR_P (temp) = 1;
3828 SUBREG_PROMOTED_UNSIGNED_P (temp)
3829 = SUBREG_PROMOTED_UNSIGNED_P (target);
3832 return want_value ? temp : NULL_RTX;
3836 temp = expand_expr (exp, target, GET_MODE (target), 0);
3837 /* Return TARGET if it's a specified hardware register.
3838 If TARGET is a volatile mem ref, either return TARGET
3839 or return a reg copied *from* TARGET; ANSI requires this.
3841 Otherwise, if TEMP is not TARGET, return TEMP
3842 if it is constant (for efficiency),
3843 or if we really want the correct value. */
3844 if (!(target && GET_CODE (target) == REG
3845 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3846 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3847 && ! rtx_equal_p (temp, target)
3848 && (CONSTANT_P (temp) || want_value))
3849 dont_return_target = 1;
3852 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3853 the same as that of TARGET, adjust the constant. This is needed, for
3854 example, in case it is a CONST_DOUBLE and we want only a word-sized
3856 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3857 && TREE_CODE (exp) != ERROR_MARK
3858 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3859 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3860 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3862 if (current_function_check_memory_usage
3863 && GET_CODE (target) == MEM
3864 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3866 if (GET_CODE (temp) == MEM)
3867 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3868 XEXP (target, 0), Pmode,
3869 XEXP (temp, 0), Pmode,
3870 expr_size (exp), TYPE_MODE (sizetype));
3872 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3873 XEXP (target, 0), Pmode,
3874 expr_size (exp), TYPE_MODE (sizetype),
3875 GEN_INT (MEMORY_USE_WO),
3876 TYPE_MODE (integer_type_node));
3879 /* If value was not generated in the target, store it there.
3880 Convert the value to TARGET's type first if nec. */
3881 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3882 one or both of them are volatile memory refs, we have to distinguish
3884 - expand_expr has used TARGET. In this case, we must not generate
3885 another copy. This can be detected by TARGET being equal according
3887 - expand_expr has not used TARGET - that means that the source just
3888 happens to have the same RTX form. Since temp will have been created
3889 by expand_expr, it will compare unequal according to == .
3890 We must generate a copy in this case, to reach the correct number
3891 of volatile memory references. */
3893 if ((! rtx_equal_p (temp, target)
3894 || (temp != target && (side_effects_p (temp)
3895 || side_effects_p (target))))
3896 && TREE_CODE (exp) != ERROR_MARK)
3898 target = protect_from_queue (target, 1);
3899 if (GET_MODE (temp) != GET_MODE (target)
3900 && GET_MODE (temp) != VOIDmode)
3902 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3903 if (dont_return_target)
3905 /* In this case, we will return TEMP,
3906 so make sure it has the proper mode.
3907 But don't forget to store the value into TARGET. */
3908 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3909 emit_move_insn (target, temp);
3912 convert_move (target, temp, unsignedp);
3915 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3917 /* Handle copying a string constant into an array.
3918 The string constant may be shorter than the array.
3919 So copy just the string's actual length, and clear the rest. */
3923 /* Get the size of the data type of the string,
3924 which is actually the size of the target. */
3925 size = expr_size (exp);
3926 if (GET_CODE (size) == CONST_INT
3927 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3928 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3931 /* Compute the size of the data to copy from the string. */
3933 = size_binop (MIN_EXPR,
3934 make_tree (sizetype, size),
3935 size_int (TREE_STRING_LENGTH (exp)));
3936 int align = TYPE_ALIGN (TREE_TYPE (exp));
3937 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3941 /* Copy that much. */
3942 emit_block_move (target, temp, copy_size_rtx,
3943 TYPE_ALIGN (TREE_TYPE (exp)));
3945 /* Figure out how much is left in TARGET that we have to clear.
3946 Do all calculations in ptr_mode. */
3948 addr = XEXP (target, 0);
3949 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3951 if (GET_CODE (copy_size_rtx) == CONST_INT)
3953 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3954 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3955 align = MIN (align, (BITS_PER_UNIT
3956 * (INTVAL (copy_size_rtx)
3957 & - INTVAL (copy_size_rtx))));
3961 addr = force_reg (ptr_mode, addr);
3962 addr = expand_binop (ptr_mode, add_optab, addr,
3963 copy_size_rtx, NULL_RTX, 0,
3966 size = expand_binop (ptr_mode, sub_optab, size,
3967 copy_size_rtx, NULL_RTX, 0,
3970 align = BITS_PER_UNIT;
3971 label = gen_label_rtx ();
3972 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3973 GET_MODE (size), 0, 0, label);
3975 align = MIN (align, expr_align (copy_size));
3977 if (size != const0_rtx)
3979 /* Be sure we can write on ADDR. */
3980 if (current_function_check_memory_usage)
3981 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3983 size, TYPE_MODE (sizetype),
3984 GEN_INT (MEMORY_USE_WO),
3985 TYPE_MODE (integer_type_node));
3986 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
3993 /* Handle calls that return values in multiple non-contiguous locations.
3994 The Irix 6 ABI has examples of this. */
3995 else if (GET_CODE (target) == PARALLEL)
3996 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3997 TYPE_ALIGN (TREE_TYPE (exp)));
3998 else if (GET_MODE (temp) == BLKmode)
3999 emit_block_move (target, temp, expr_size (exp),
4000 TYPE_ALIGN (TREE_TYPE (exp)));
4002 emit_move_insn (target, temp);
4005 /* If we don't want a value, return NULL_RTX. */
4009 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4010 ??? The latter test doesn't seem to make sense. */
4011 else if (dont_return_target && GET_CODE (temp) != MEM)
4014 /* Return TARGET itself if it is a hard register. */
4015 else if (want_value && GET_MODE (target) != BLKmode
4016 && ! (GET_CODE (target) == REG
4017 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4018 return copy_to_reg (target);
4024 /* Return 1 if EXP just contains zeros. */
4032 switch (TREE_CODE (exp))
4036 case NON_LVALUE_EXPR:
4037 return is_zeros_p (TREE_OPERAND (exp, 0));
4040 return integer_zerop (exp);
4044 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4047 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4050 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4051 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4052 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4053 if (! is_zeros_p (TREE_VALUE (elt)))
4063 /* Return 1 if EXP contains mostly (3/4) zeros. */
4066 mostly_zeros_p (exp)
4069 if (TREE_CODE (exp) == CONSTRUCTOR)
4071 int elts = 0, zeros = 0;
4072 tree elt = CONSTRUCTOR_ELTS (exp);
4073 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4075 /* If there are no ranges of true bits, it is all zero. */
4076 return elt == NULL_TREE;
4078 for (; elt; elt = TREE_CHAIN (elt))
4080 /* We do not handle the case where the index is a RANGE_EXPR,
4081 so the statistic will be somewhat inaccurate.
4082 We do make a more accurate count in store_constructor itself,
4083 so since this function is only used for nested array elements,
4084 this should be close enough. */
4085 if (mostly_zeros_p (TREE_VALUE (elt)))
4090 return 4 * zeros >= 3 * elts;
4093 return is_zeros_p (exp);
4096 /* Helper function for store_constructor.
4097 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4098 TYPE is the type of the CONSTRUCTOR, not the element type.
4099 ALIGN and CLEARED are as for store_constructor.
4101 This provides a recursive shortcut back to store_constructor when it isn't
4102 necessary to go through store_field. This is so that we can pass through
4103 the cleared field to let store_constructor know that we may not have to
4104 clear a substructure if the outer structure has already been cleared. */
4107 store_constructor_field (target, bitsize, bitpos,
4108 mode, exp, type, align, cleared)
4110 unsigned HOST_WIDE_INT bitsize;
4111 HOST_WIDE_INT bitpos;
4112 enum machine_mode mode;
4117 if (TREE_CODE (exp) == CONSTRUCTOR
4118 && bitpos % BITS_PER_UNIT == 0
4119 /* If we have a non-zero bitpos for a register target, then we just
4120 let store_field do the bitfield handling. This is unlikely to
4121 generate unnecessary clear instructions anyways. */
4122 && (bitpos == 0 || GET_CODE (target) == MEM))
4126 = change_address (target,
4127 GET_MODE (target) == BLKmode
4129 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4130 ? BLKmode : VOIDmode,
4131 plus_constant (XEXP (target, 0),
4132 bitpos / BITS_PER_UNIT));
4133 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4136 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4137 int_size_in_bytes (type), 0);
4140 /* Store the value of constructor EXP into the rtx TARGET.
4141 TARGET is either a REG or a MEM.
4142 ALIGN is the maximum known alignment for TARGET.
4143 CLEARED is true if TARGET is known to have been zero'd.
4144 SIZE is the number of bytes of TARGET we are allowed to modify: this
4145 may not be the same as the size of EXP if we are assigning to a field
4146 which has been packed to exclude padding bits. */
4149 store_constructor (exp, target, align, cleared, size)
4156 tree type = TREE_TYPE (exp);
4157 #ifdef WORD_REGISTER_OPERATIONS
4158 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4161 /* We know our target cannot conflict, since safe_from_p has been called. */
4163 /* Don't try copying piece by piece into a hard register
4164 since that is vulnerable to being clobbered by EXP.
4165 Instead, construct in a pseudo register and then copy it all. */
4166 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4168 rtx temp = gen_reg_rtx (GET_MODE (target));
4169 store_constructor (exp, temp, align, cleared, size);
4170 emit_move_insn (target, temp);
4175 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4176 || TREE_CODE (type) == QUAL_UNION_TYPE)
4180 /* Inform later passes that the whole union value is dead. */
4181 if ((TREE_CODE (type) == UNION_TYPE
4182 || TREE_CODE (type) == QUAL_UNION_TYPE)
4185 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4187 /* If the constructor is empty, clear the union. */
4188 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4189 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4192 /* If we are building a static constructor into a register,
4193 set the initial value as zero so we can fold the value into
4194 a constant. But if more than one register is involved,
4195 this probably loses. */
4196 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4197 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4200 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4205 /* If the constructor has fewer fields than the structure
4206 or if we are initializing the structure to mostly zeros,
4207 clear the whole structure first. */
4209 && ((list_length (CONSTRUCTOR_ELTS (exp))
4210 != fields_length (type))
4211 || mostly_zeros_p (exp)))
4214 clear_storage (target, GEN_INT (size), align);
4219 /* Inform later passes that the old value is dead. */
4220 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4222 /* Store each element of the constructor into
4223 the corresponding field of TARGET. */
4225 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4227 register tree field = TREE_PURPOSE (elt);
4228 #ifdef WORD_REGISTER_OPERATIONS
4229 tree value = TREE_VALUE (elt);
4231 register enum machine_mode mode;
4232 HOST_WIDE_INT bitsize;
4233 HOST_WIDE_INT bitpos = 0;
4236 rtx to_rtx = target;
4238 /* Just ignore missing fields.
4239 We cleared the whole structure, above,
4240 if any fields are missing. */
4244 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4247 if (host_integerp (DECL_SIZE (field), 1))
4248 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4252 unsignedp = TREE_UNSIGNED (field);
4253 mode = DECL_MODE (field);
4254 if (DECL_BIT_FIELD (field))
4257 offset = DECL_FIELD_OFFSET (field);
4258 if (host_integerp (offset, 0)
4259 && host_integerp (bit_position (field), 0))
4261 bitpos = int_bit_position (field);
4265 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4271 if (contains_placeholder_p (offset))
4272 offset = build (WITH_RECORD_EXPR, sizetype,
4273 offset, make_tree (TREE_TYPE (exp), target));
4275 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4276 if (GET_CODE (to_rtx) != MEM)
4279 if (GET_MODE (offset_rtx) != ptr_mode)
4281 #ifdef POINTERS_EXTEND_UNSIGNED
4282 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4284 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4289 = change_address (to_rtx, VOIDmode,
4290 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4291 force_reg (ptr_mode,
4293 align = DECL_OFFSET_ALIGN (field);
4296 if (TREE_READONLY (field))
4298 if (GET_CODE (to_rtx) == MEM)
4299 to_rtx = copy_rtx (to_rtx);
4301 RTX_UNCHANGING_P (to_rtx) = 1;
4304 #ifdef WORD_REGISTER_OPERATIONS
4305 /* If this initializes a field that is smaller than a word, at the
4306 start of a word, try to widen it to a full word.
4307 This special case allows us to output C++ member function
4308 initializations in a form that the optimizers can understand. */
4309 if (GET_CODE (target) == REG
4310 && bitsize < BITS_PER_WORD
4311 && bitpos % BITS_PER_WORD == 0
4312 && GET_MODE_CLASS (mode) == MODE_INT
4313 && TREE_CODE (value) == INTEGER_CST
4315 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4317 tree type = TREE_TYPE (value);
4318 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4320 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4321 value = convert (type, value);
4323 if (BYTES_BIG_ENDIAN)
4325 = fold (build (LSHIFT_EXPR, type, value,
4326 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4327 bitsize = BITS_PER_WORD;
4331 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4332 TREE_VALUE (elt), type, align, cleared);
4335 else if (TREE_CODE (type) == ARRAY_TYPE)
4340 tree domain = TYPE_DOMAIN (type);
4341 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4342 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4343 tree elttype = TREE_TYPE (type);
4345 /* If the constructor has fewer elements than the array,
4346 clear the whole array first. Similarly if this is
4347 static constructor of a non-BLKmode object. */
4348 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4352 HOST_WIDE_INT count = 0, zero_count = 0;
4354 /* This loop is a more accurate version of the loop in
4355 mostly_zeros_p (it handles RANGE_EXPR in an index).
4356 It is also needed to check for missing elements. */
4357 for (elt = CONSTRUCTOR_ELTS (exp);
4359 elt = TREE_CHAIN (elt))
4361 tree index = TREE_PURPOSE (elt);
4362 HOST_WIDE_INT this_node_count;
4364 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4366 tree lo_index = TREE_OPERAND (index, 0);
4367 tree hi_index = TREE_OPERAND (index, 1);
4369 if (! host_integerp (lo_index, 1)
4370 || ! host_integerp (hi_index, 1))
4376 this_node_count = (tree_low_cst (hi_index, 1)
4377 - tree_low_cst (lo_index, 1) + 1);
4380 this_node_count = 1;
4381 count += this_node_count;
4382 if (mostly_zeros_p (TREE_VALUE (elt)))
4383 zero_count += this_node_count;
4385 /* Clear the entire array first if there are any missing elements,
4386 or if the incidence of zero elements is >= 75%. */
4387 if (count < maxelt - minelt + 1
4388 || 4 * zero_count >= 3 * count)
4391 if (need_to_clear && size > 0)
4394 clear_storage (target, GEN_INT (size), align);
4398 /* Inform later passes that the old value is dead. */
4399 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4401 /* Store each element of the constructor into
4402 the corresponding element of TARGET, determined
4403 by counting the elements. */
4404 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4406 elt = TREE_CHAIN (elt), i++)
4408 register enum machine_mode mode;
4409 HOST_WIDE_INT bitsize;
4410 HOST_WIDE_INT bitpos;
4412 tree value = TREE_VALUE (elt);
4413 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4414 tree index = TREE_PURPOSE (elt);
4415 rtx xtarget = target;
4417 if (cleared && is_zeros_p (value))
4420 unsignedp = TREE_UNSIGNED (elttype);
4421 mode = TYPE_MODE (elttype);
4422 if (mode == BLKmode)
4423 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4424 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4427 bitsize = GET_MODE_BITSIZE (mode);
4429 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4431 tree lo_index = TREE_OPERAND (index, 0);
4432 tree hi_index = TREE_OPERAND (index, 1);
4433 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4434 struct nesting *loop;
4435 HOST_WIDE_INT lo, hi, count;
4438 /* If the range is constant and "small", unroll the loop. */
4439 if (host_integerp (lo_index, 0)
4440 && host_integerp (hi_index, 0)
4441 && (lo = tree_low_cst (lo_index, 0),
4442 hi = tree_low_cst (hi_index, 0),
4443 count = hi - lo + 1,
4444 (GET_CODE (target) != MEM
4446 || (host_integerp (TYPE_SIZE (elttype), 1)
4447 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4450 lo -= minelt; hi -= minelt;
4451 for (; lo <= hi; lo++)
4453 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4454 store_constructor_field (target, bitsize, bitpos, mode,
4455 value, type, align, cleared);
4460 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4461 loop_top = gen_label_rtx ();
4462 loop_end = gen_label_rtx ();
4464 unsignedp = TREE_UNSIGNED (domain);
4466 index = build_decl (VAR_DECL, NULL_TREE, domain);
4468 DECL_RTL (index) = index_r
4469 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4472 if (TREE_CODE (value) == SAVE_EXPR
4473 && SAVE_EXPR_RTL (value) == 0)
4475 /* Make sure value gets expanded once before the
4477 expand_expr (value, const0_rtx, VOIDmode, 0);
4480 store_expr (lo_index, index_r, 0);
4481 loop = expand_start_loop (0);
4483 /* Assign value to element index. */
4485 = convert (ssizetype,
4486 fold (build (MINUS_EXPR, TREE_TYPE (index),
4487 index, TYPE_MIN_VALUE (domain))));
4488 position = size_binop (MULT_EXPR, position,
4490 TYPE_SIZE_UNIT (elttype)));
4492 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4493 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4494 xtarget = change_address (target, mode, addr);
4495 if (TREE_CODE (value) == CONSTRUCTOR)
4496 store_constructor (value, xtarget, align, cleared,
4497 bitsize / BITS_PER_UNIT);
4499 store_expr (value, xtarget, 0);
4501 expand_exit_loop_if_false (loop,
4502 build (LT_EXPR, integer_type_node,
4505 expand_increment (build (PREINCREMENT_EXPR,
4507 index, integer_one_node), 0, 0);
4509 emit_label (loop_end);
4512 else if ((index != 0 && ! host_integerp (index, 0))
4513 || ! host_integerp (TYPE_SIZE (elttype), 1))
4519 index = ssize_int (1);
4522 index = convert (ssizetype,
4523 fold (build (MINUS_EXPR, index,
4524 TYPE_MIN_VALUE (domain))));
4526 position = size_binop (MULT_EXPR, index,
4528 TYPE_SIZE_UNIT (elttype)));
4529 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4530 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4531 xtarget = change_address (target, mode, addr);
4532 store_expr (value, xtarget, 0);
4537 bitpos = ((tree_low_cst (index, 0) - minelt)
4538 * tree_low_cst (TYPE_SIZE (elttype), 1));
4540 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4542 store_constructor_field (target, bitsize, bitpos, mode, value,
4543 type, align, cleared);
4548 /* Set constructor assignments */
4549 else if (TREE_CODE (type) == SET_TYPE)
4551 tree elt = CONSTRUCTOR_ELTS (exp);
4552 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4553 tree domain = TYPE_DOMAIN (type);
4554 tree domain_min, domain_max, bitlength;
4556 /* The default implementation strategy is to extract the constant
4557 parts of the constructor, use that to initialize the target,
4558 and then "or" in whatever non-constant ranges we need in addition.
4560 If a large set is all zero or all ones, it is
4561 probably better to set it using memset (if available) or bzero.
4562 Also, if a large set has just a single range, it may also be
4563 better to first clear all the first clear the set (using
4564 bzero/memset), and set the bits we want. */
4566 /* Check for all zeros. */
4567 if (elt == NULL_TREE && size > 0)
4570 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4574 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4575 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4576 bitlength = size_binop (PLUS_EXPR,
4577 size_diffop (domain_max, domain_min),
4580 nbits = tree_low_cst (bitlength, 1);
4582 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4583 are "complicated" (more than one range), initialize (the
4584 constant parts) by copying from a constant. */
4585 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4586 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4588 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4589 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4590 char *bit_buffer = (char *) alloca (nbits);
4591 HOST_WIDE_INT word = 0;
4592 unsigned int bit_pos = 0;
4593 unsigned int ibit = 0;
4594 unsigned int offset = 0; /* In bytes from beginning of set. */
4596 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4599 if (bit_buffer[ibit])
4601 if (BYTES_BIG_ENDIAN)
4602 word |= (1 << (set_word_size - 1 - bit_pos));
4604 word |= 1 << bit_pos;
4608 if (bit_pos >= set_word_size || ibit == nbits)
4610 if (word != 0 || ! cleared)
4612 rtx datum = GEN_INT (word);
4615 /* The assumption here is that it is safe to use
4616 XEXP if the set is multi-word, but not if
4617 it's single-word. */
4618 if (GET_CODE (target) == MEM)
4620 to_rtx = plus_constant (XEXP (target, 0), offset);
4621 to_rtx = change_address (target, mode, to_rtx);
4623 else if (offset == 0)
4627 emit_move_insn (to_rtx, datum);
4634 offset += set_word_size / BITS_PER_UNIT;
4639 /* Don't bother clearing storage if the set is all ones. */
4640 if (TREE_CHAIN (elt) != NULL_TREE
4641 || (TREE_PURPOSE (elt) == NULL_TREE
4643 : ( ! host_integerp (TREE_VALUE (elt), 0)
4644 || ! host_integerp (TREE_PURPOSE (elt), 0)
4645 || (tree_low_cst (TREE_VALUE (elt), 0)
4646 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4647 != (HOST_WIDE_INT) nbits))))
4648 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4650 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4652 /* start of range of element or NULL */
4653 tree startbit = TREE_PURPOSE (elt);
4654 /* end of range of element, or element value */
4655 tree endbit = TREE_VALUE (elt);
4656 #ifdef TARGET_MEM_FUNCTIONS
4657 HOST_WIDE_INT startb, endb;
4659 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4661 bitlength_rtx = expand_expr (bitlength,
4662 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4664 /* handle non-range tuple element like [ expr ] */
4665 if (startbit == NULL_TREE)
4667 startbit = save_expr (endbit);
4671 startbit = convert (sizetype, startbit);
4672 endbit = convert (sizetype, endbit);
4673 if (! integer_zerop (domain_min))
4675 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4676 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4678 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4679 EXPAND_CONST_ADDRESS);
4680 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4681 EXPAND_CONST_ADDRESS);
4685 targetx = assign_stack_temp (GET_MODE (target),
4686 GET_MODE_SIZE (GET_MODE (target)),
4688 emit_move_insn (targetx, target);
4691 else if (GET_CODE (target) == MEM)
4696 #ifdef TARGET_MEM_FUNCTIONS
4697 /* Optimization: If startbit and endbit are
4698 constants divisible by BITS_PER_UNIT,
4699 call memset instead. */
4700 if (TREE_CODE (startbit) == INTEGER_CST
4701 && TREE_CODE (endbit) == INTEGER_CST
4702 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4703 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4705 emit_library_call (memset_libfunc, 0,
4707 plus_constant (XEXP (targetx, 0),
4708 startb / BITS_PER_UNIT),
4710 constm1_rtx, TYPE_MODE (integer_type_node),
4711 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4712 TYPE_MODE (sizetype));
4716 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4717 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4718 bitlength_rtx, TYPE_MODE (sizetype),
4719 startbit_rtx, TYPE_MODE (sizetype),
4720 endbit_rtx, TYPE_MODE (sizetype));
4723 emit_move_insn (target, targetx);
4731 /* Store the value of EXP (an expression tree)
4732 into a subfield of TARGET which has mode MODE and occupies
4733 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4734 If MODE is VOIDmode, it means that we are storing into a bit-field.
4736 If VALUE_MODE is VOIDmode, return nothing in particular.
4737 UNSIGNEDP is not used in this case.
4739 Otherwise, return an rtx for the value stored. This rtx
4740 has mode VALUE_MODE if that is convenient to do.
4741 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4743 ALIGN is the alignment that TARGET is known to have.
4744 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4746 ALIAS_SET is the alias set for the destination. This value will
4747 (in general) be different from that for TARGET, since TARGET is a
4748 reference to the containing structure. */
4751 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4752 unsignedp, align, total_size, alias_set)
4754 HOST_WIDE_INT bitsize;
4755 HOST_WIDE_INT bitpos;
4756 enum machine_mode mode;
4758 enum machine_mode value_mode;
4761 HOST_WIDE_INT total_size;
4764 HOST_WIDE_INT width_mask = 0;
4766 if (TREE_CODE (exp) == ERROR_MARK)
4769 if (bitsize < HOST_BITS_PER_WIDE_INT)
4770 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4772 /* If we are storing into an unaligned field of an aligned union that is
4773 in a register, we may have the mode of TARGET being an integer mode but
4774 MODE == BLKmode. In that case, get an aligned object whose size and
4775 alignment are the same as TARGET and store TARGET into it (we can avoid
4776 the store if the field being stored is the entire width of TARGET). Then
4777 call ourselves recursively to store the field into a BLKmode version of
4778 that object. Finally, load from the object into TARGET. This is not
4779 very efficient in general, but should only be slightly more expensive
4780 than the otherwise-required unaligned accesses. Perhaps this can be
4781 cleaned up later. */
4784 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4786 rtx object = assign_stack_temp (GET_MODE (target),
4787 GET_MODE_SIZE (GET_MODE (target)), 0);
4788 rtx blk_object = copy_rtx (object);
4790 MEM_SET_IN_STRUCT_P (object, 1);
4791 MEM_SET_IN_STRUCT_P (blk_object, 1);
4792 PUT_MODE (blk_object, BLKmode);
4794 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4795 emit_move_insn (object, target);
4797 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4798 align, total_size, alias_set);
4800 /* Even though we aren't returning target, we need to
4801 give it the updated value. */
4802 emit_move_insn (target, object);
4807 if (GET_CODE (target) == CONCAT)
4809 /* We're storing into a struct containing a single __complex. */
4813 return store_expr (exp, target, 0);
4816 /* If the structure is in a register or if the component
4817 is a bit field, we cannot use addressing to access it.
4818 Use bit-field techniques or SUBREG to store in it. */
4820 if (mode == VOIDmode
4821 || (mode != BLKmode && ! direct_store[(int) mode]
4822 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4823 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4824 || GET_CODE (target) == REG
4825 || GET_CODE (target) == SUBREG
4826 /* If the field isn't aligned enough to store as an ordinary memref,
4827 store it as a bit field. */
4828 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4829 && (align < GET_MODE_ALIGNMENT (mode)
4830 || bitpos % GET_MODE_ALIGNMENT (mode)))
4831 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4832 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4833 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4834 /* If the RHS and field are a constant size and the size of the
4835 RHS isn't the same size as the bitfield, we must use bitfield
4838 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4839 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4841 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4843 /* If BITSIZE is narrower than the size of the type of EXP
4844 we will be narrowing TEMP. Normally, what's wanted are the
4845 low-order bits. However, if EXP's type is a record and this is
4846 big-endian machine, we want the upper BITSIZE bits. */
4847 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4848 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4849 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4850 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4851 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4855 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4857 if (mode != VOIDmode && mode != BLKmode
4858 && mode != TYPE_MODE (TREE_TYPE (exp)))
4859 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4861 /* If the modes of TARGET and TEMP are both BLKmode, both
4862 must be in memory and BITPOS must be aligned on a byte
4863 boundary. If so, we simply do a block copy. */
4864 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4866 unsigned int exp_align = expr_align (exp);
4868 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4869 || bitpos % BITS_PER_UNIT != 0)
4872 target = change_address (target, VOIDmode,
4873 plus_constant (XEXP (target, 0),
4874 bitpos / BITS_PER_UNIT));
4876 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4877 align = MIN (exp_align, align);
4879 /* Find an alignment that is consistent with the bit position. */
4880 while ((bitpos % align) != 0)
4883 emit_block_move (target, temp,
4884 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4888 return value_mode == VOIDmode ? const0_rtx : target;
4891 /* Store the value in the bitfield. */
4892 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4893 if (value_mode != VOIDmode)
4895 /* The caller wants an rtx for the value. */
4896 /* If possible, avoid refetching from the bitfield itself. */
4898 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4901 enum machine_mode tmode;
4904 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4905 tmode = GET_MODE (temp);
4906 if (tmode == VOIDmode)
4908 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4909 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4910 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4912 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4913 NULL_RTX, value_mode, 0, align,
4920 rtx addr = XEXP (target, 0);
4923 /* If a value is wanted, it must be the lhs;
4924 so make the address stable for multiple use. */
4926 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4927 && ! CONSTANT_ADDRESS_P (addr)
4928 /* A frame-pointer reference is already stable. */
4929 && ! (GET_CODE (addr) == PLUS
4930 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4931 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4932 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4933 addr = copy_to_reg (addr);
4935 /* Now build a reference to just the desired component. */
4937 to_rtx = copy_rtx (change_address (target, mode,
4938 plus_constant (addr,
4940 / BITS_PER_UNIT))));
4941 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4942 MEM_ALIAS_SET (to_rtx) = alias_set;
4944 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4948 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4949 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4950 ARRAY_REFs and find the ultimate containing object, which we return.
4952 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4953 bit position, and *PUNSIGNEDP to the signedness of the field.
4954 If the position of the field is variable, we store a tree
4955 giving the variable offset (in units) in *POFFSET.
4956 This offset is in addition to the bit position.
4957 If the position is not variable, we store 0 in *POFFSET.
4958 We set *PALIGNMENT to the alignment of the address that will be
4959 computed. This is the alignment of the thing we return if *POFFSET
4960 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4962 If any of the extraction expressions is volatile,
4963 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4965 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4966 is a mode that can be used to access the field. In that case, *PBITSIZE
4969 If the field describes a variable-sized object, *PMODE is set to
4970 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4971 this case, but the address of the object can be found. */
4974 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4975 punsignedp, pvolatilep, palignment)
4977 HOST_WIDE_INT *pbitsize;
4978 HOST_WIDE_INT *pbitpos;
4980 enum machine_mode *pmode;
4983 unsigned int *palignment;
4986 enum machine_mode mode = VOIDmode;
4987 tree offset = size_zero_node;
4988 tree bit_offset = bitsize_zero_node;
4989 unsigned int alignment = BIGGEST_ALIGNMENT;
4992 /* First get the mode, signedness, and size. We do this from just the
4993 outermost expression. */
4994 if (TREE_CODE (exp) == COMPONENT_REF)
4996 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4997 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4998 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5000 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5002 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5004 size_tree = TREE_OPERAND (exp, 1);
5005 *punsignedp = TREE_UNSIGNED (exp);
5009 mode = TYPE_MODE (TREE_TYPE (exp));
5010 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5012 if (mode == BLKmode)
5013 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5015 *pbitsize = GET_MODE_BITSIZE (mode);
5020 if (! host_integerp (size_tree, 1))
5021 mode = BLKmode, *pbitsize = -1;
5023 *pbitsize = tree_low_cst (size_tree, 1);
5026 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5027 and find the ultimate containing object. */
5030 if (TREE_CODE (exp) == BIT_FIELD_REF)
5031 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5032 else if (TREE_CODE (exp) == COMPONENT_REF)
5034 tree field = TREE_OPERAND (exp, 1);
5035 tree this_offset = DECL_FIELD_OFFSET (field);
5037 /* If this field hasn't been filled in yet, don't go
5038 past it. This should only happen when folding expressions
5039 made during type construction. */
5040 if (this_offset == 0)
5042 else if (! TREE_CONSTANT (this_offset)
5043 && contains_placeholder_p (this_offset))
5044 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5046 offset = size_binop (PLUS_EXPR, offset, this_offset);
5047 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5048 DECL_FIELD_BIT_OFFSET (field));
5050 if (! host_integerp (offset, 0))
5051 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5054 else if (TREE_CODE (exp) == ARRAY_REF)
5056 tree index = TREE_OPERAND (exp, 1);
5057 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5058 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5059 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5061 /* We assume all arrays have sizes that are a multiple of a byte.
5062 First subtract the lower bound, if any, in the type of the
5063 index, then convert to sizetype and multiply by the size of the
5065 if (low_bound != 0 && ! integer_zerop (low_bound))
5066 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5069 /* If the index has a self-referential type, pass it to a
5070 WITH_RECORD_EXPR; if the component size is, pass our
5071 component to one. */
5072 if (! TREE_CONSTANT (index)
5073 && contains_placeholder_p (index))
5074 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5075 if (! TREE_CONSTANT (unit_size)
5076 && contains_placeholder_p (unit_size))
5077 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5078 TREE_OPERAND (exp, 0));
5080 offset = size_binop (PLUS_EXPR, offset,
5081 size_binop (MULT_EXPR,
5082 convert (sizetype, index),
5086 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5087 && ! ((TREE_CODE (exp) == NOP_EXPR
5088 || TREE_CODE (exp) == CONVERT_EXPR)
5089 && (TYPE_MODE (TREE_TYPE (exp))
5090 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5093 /* If any reference in the chain is volatile, the effect is volatile. */
5094 if (TREE_THIS_VOLATILE (exp))
5097 /* If the offset is non-constant already, then we can't assume any
5098 alignment more than the alignment here. */
5099 if (! TREE_CONSTANT (offset))
5100 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5102 exp = TREE_OPERAND (exp, 0);
5106 alignment = MIN (alignment, DECL_ALIGN (exp));
5107 else if (TREE_TYPE (exp) != 0)
5108 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5110 /* If OFFSET is constant, see if we can return the whole thing as a
5111 constant bit position. Otherwise, split it up. */
5112 if (host_integerp (offset, 0)
5113 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5115 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5116 && host_integerp (tem, 0))
5117 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5119 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5122 *palignment = alignment;
5126 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5128 static enum memory_use_mode
5129 get_memory_usage_from_modifier (modifier)
5130 enum expand_modifier modifier;
5136 return MEMORY_USE_RO;
5138 case EXPAND_MEMORY_USE_WO:
5139 return MEMORY_USE_WO;
5141 case EXPAND_MEMORY_USE_RW:
5142 return MEMORY_USE_RW;
5144 case EXPAND_MEMORY_USE_DONT:
5145 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5146 MEMORY_USE_DONT, because they are modifiers to a call of
5147 expand_expr in the ADDR_EXPR case of expand_expr. */
5148 case EXPAND_CONST_ADDRESS:
5149 case EXPAND_INITIALIZER:
5150 return MEMORY_USE_DONT;
5151 case EXPAND_MEMORY_USE_BAD:
5157 /* Given an rtx VALUE that may contain additions and multiplications,
5158 return an equivalent value that just refers to a register or memory.
5159 This is done by generating instructions to perform the arithmetic
5160 and returning a pseudo-register containing the value.
5162 The returned value may be a REG, SUBREG, MEM or constant. */
5165 force_operand (value, target)
5168 register optab binoptab = 0;
5169 /* Use a temporary to force order of execution of calls to
5173 /* Use subtarget as the target for operand 0 of a binary operation. */
5174 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5176 /* Check for a PIC address load. */
5178 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5179 && XEXP (value, 0) == pic_offset_table_rtx
5180 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5181 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5182 || GET_CODE (XEXP (value, 1)) == CONST))
5185 subtarget = gen_reg_rtx (GET_MODE (value));
5186 emit_move_insn (subtarget, value);
5190 if (GET_CODE (value) == PLUS)
5191 binoptab = add_optab;
5192 else if (GET_CODE (value) == MINUS)
5193 binoptab = sub_optab;
5194 else if (GET_CODE (value) == MULT)
5196 op2 = XEXP (value, 1);
5197 if (!CONSTANT_P (op2)
5198 && !(GET_CODE (op2) == REG && op2 != subtarget))
5200 tmp = force_operand (XEXP (value, 0), subtarget);
5201 return expand_mult (GET_MODE (value), tmp,
5202 force_operand (op2, NULL_RTX),
5208 op2 = XEXP (value, 1);
5209 if (!CONSTANT_P (op2)
5210 && !(GET_CODE (op2) == REG && op2 != subtarget))
5212 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5214 binoptab = add_optab;
5215 op2 = negate_rtx (GET_MODE (value), op2);
5218 /* Check for an addition with OP2 a constant integer and our first
5219 operand a PLUS of a virtual register and something else. In that
5220 case, we want to emit the sum of the virtual register and the
5221 constant first and then add the other value. This allows virtual
5222 register instantiation to simply modify the constant rather than
5223 creating another one around this addition. */
5224 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5225 && GET_CODE (XEXP (value, 0)) == PLUS
5226 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5227 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5228 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5230 rtx temp = expand_binop (GET_MODE (value), binoptab,
5231 XEXP (XEXP (value, 0), 0), op2,
5232 subtarget, 0, OPTAB_LIB_WIDEN);
5233 return expand_binop (GET_MODE (value), binoptab, temp,
5234 force_operand (XEXP (XEXP (value, 0), 1), 0),
5235 target, 0, OPTAB_LIB_WIDEN);
5238 tmp = force_operand (XEXP (value, 0), subtarget);
5239 return expand_binop (GET_MODE (value), binoptab, tmp,
5240 force_operand (op2, NULL_RTX),
5241 target, 0, OPTAB_LIB_WIDEN);
5242 /* We give UNSIGNEDP = 0 to expand_binop
5243 because the only operations we are expanding here are signed ones. */
5248 /* Subroutine of expand_expr:
5249 save the non-copied parts (LIST) of an expr (LHS), and return a list
5250 which can restore these values to their previous values,
5251 should something modify their storage. */
5254 save_noncopied_parts (lhs, list)
5261 for (tail = list; tail; tail = TREE_CHAIN (tail))
5262 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5263 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5266 tree part = TREE_VALUE (tail);
5267 tree part_type = TREE_TYPE (part);
5268 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5269 rtx target = assign_temp (part_type, 0, 1, 1);
5270 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5271 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5272 parts = tree_cons (to_be_saved,
5273 build (RTL_EXPR, part_type, NULL_TREE,
5276 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5281 /* Subroutine of expand_expr:
5282 record the non-copied parts (LIST) of an expr (LHS), and return a list
5283 which specifies the initial values of these parts. */
5286 init_noncopied_parts (lhs, list)
5293 for (tail = list; tail; tail = TREE_CHAIN (tail))
5294 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5295 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5296 else if (TREE_PURPOSE (tail))
5298 tree part = TREE_VALUE (tail);
5299 tree part_type = TREE_TYPE (part);
5300 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5301 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5306 /* Subroutine of expand_expr: return nonzero iff there is no way that
5307 EXP can reference X, which is being modified. TOP_P is nonzero if this
5308 call is going to be used to determine whether we need a temporary
5309 for EXP, as opposed to a recursive call to this function.
5311 It is always safe for this routine to return zero since it merely
5312 searches for optimization opportunities. */
5315 safe_from_p (x, exp, top_p)
5322 static int save_expr_count;
5323 static int save_expr_size = 0;
5324 static tree *save_expr_rewritten;
5325 static tree save_expr_trees[256];
5328 /* If EXP has varying size, we MUST use a target since we currently
5329 have no way of allocating temporaries of variable size
5330 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5331 So we assume here that something at a higher level has prevented a
5332 clash. This is somewhat bogus, but the best we can do. Only
5333 do this when X is BLKmode and when we are at the top level. */
5334 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5335 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5336 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5337 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5338 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5340 && GET_MODE (x) == BLKmode))
5343 if (top_p && save_expr_size == 0)
5347 save_expr_count = 0;
5348 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5349 save_expr_rewritten = &save_expr_trees[0];
5351 rtn = safe_from_p (x, exp, 1);
5353 for (i = 0; i < save_expr_count; ++i)
5355 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5357 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5365 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5366 find the underlying pseudo. */
5367 if (GET_CODE (x) == SUBREG)
5370 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5374 /* If X is a location in the outgoing argument area, it is always safe. */
5375 if (GET_CODE (x) == MEM
5376 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5377 || (GET_CODE (XEXP (x, 0)) == PLUS
5378 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5381 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5384 exp_rtl = DECL_RTL (exp);
5391 if (TREE_CODE (exp) == TREE_LIST)
5392 return ((TREE_VALUE (exp) == 0
5393 || safe_from_p (x, TREE_VALUE (exp), 0))
5394 && (TREE_CHAIN (exp) == 0
5395 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5396 else if (TREE_CODE (exp) == ERROR_MARK)
5397 return 1; /* An already-visited SAVE_EXPR? */
5402 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5406 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5407 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5411 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5412 the expression. If it is set, we conflict iff we are that rtx or
5413 both are in memory. Otherwise, we check all operands of the
5414 expression recursively. */
5416 switch (TREE_CODE (exp))
5419 return (staticp (TREE_OPERAND (exp, 0))
5420 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5421 || TREE_STATIC (exp));
5424 if (GET_CODE (x) == MEM)
5429 exp_rtl = CALL_EXPR_RTL (exp);
5432 /* Assume that the call will clobber all hard registers and
5434 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5435 || GET_CODE (x) == MEM)
5442 /* If a sequence exists, we would have to scan every instruction
5443 in the sequence to see if it was safe. This is probably not
5445 if (RTL_EXPR_SEQUENCE (exp))
5448 exp_rtl = RTL_EXPR_RTL (exp);
5451 case WITH_CLEANUP_EXPR:
5452 exp_rtl = RTL_EXPR_RTL (exp);
5455 case CLEANUP_POINT_EXPR:
5456 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5459 exp_rtl = SAVE_EXPR_RTL (exp);
5463 /* This SAVE_EXPR might appear many times in the top-level
5464 safe_from_p() expression, and if it has a complex
5465 subexpression, examining it multiple times could result
5466 in a combinatorial explosion. E.g. on an Alpha
5467 running at least 200MHz, a Fortran test case compiled with
5468 optimization took about 28 minutes to compile -- even though
5469 it was only a few lines long, and the complicated line causing
5470 so much time to be spent in the earlier version of safe_from_p()
5471 had only 293 or so unique nodes.
5473 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5474 where it is so we can turn it back in the top-level safe_from_p()
5477 /* For now, don't bother re-sizing the array. */
5478 if (save_expr_count >= save_expr_size)
5480 save_expr_rewritten[save_expr_count++] = exp;
5482 nops = tree_code_length[(int) SAVE_EXPR];
5483 for (i = 0; i < nops; i++)
5485 tree operand = TREE_OPERAND (exp, i);
5486 if (operand == NULL_TREE)
5488 TREE_SET_CODE (exp, ERROR_MARK);
5489 if (!safe_from_p (x, operand, 0))
5491 TREE_SET_CODE (exp, SAVE_EXPR);
5493 TREE_SET_CODE (exp, ERROR_MARK);
5497 /* The only operand we look at is operand 1. The rest aren't
5498 part of the expression. */
5499 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5501 case METHOD_CALL_EXPR:
5502 /* This takes a rtx argument, but shouldn't appear here. */
5509 /* If we have an rtx, we do not need to scan our operands. */
5513 nops = tree_code_length[(int) TREE_CODE (exp)];
5514 for (i = 0; i < nops; i++)
5515 if (TREE_OPERAND (exp, i) != 0
5516 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5520 /* If we have an rtl, find any enclosed object. Then see if we conflict
5524 if (GET_CODE (exp_rtl) == SUBREG)
5526 exp_rtl = SUBREG_REG (exp_rtl);
5527 if (GET_CODE (exp_rtl) == REG
5528 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5532 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5533 are memory and EXP is not readonly. */
5534 return ! (rtx_equal_p (x, exp_rtl)
5535 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5536 && ! TREE_READONLY (exp)));
5539 /* If we reach here, it is safe. */
5543 /* Subroutine of expand_expr: return nonzero iff EXP is an
5544 expression whose type is statically determinable. */
5550 if (TREE_CODE (exp) == PARM_DECL
5551 || TREE_CODE (exp) == VAR_DECL
5552 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5553 || TREE_CODE (exp) == COMPONENT_REF
5554 || TREE_CODE (exp) == ARRAY_REF)
5559 /* Subroutine of expand_expr: return rtx if EXP is a
5560 variable or parameter; else return 0. */
5567 switch (TREE_CODE (exp))
5571 return DECL_RTL (exp);
5577 #ifdef MAX_INTEGER_COMPUTATION_MODE
5579 check_max_integer_computation_mode (exp)
5582 enum tree_code code;
5583 enum machine_mode mode;
5585 /* Strip any NOPs that don't change the mode. */
5587 code = TREE_CODE (exp);
5589 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5590 if (code == NOP_EXPR
5591 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5594 /* First check the type of the overall operation. We need only look at
5595 unary, binary and relational operations. */
5596 if (TREE_CODE_CLASS (code) == '1'
5597 || TREE_CODE_CLASS (code) == '2'
5598 || TREE_CODE_CLASS (code) == '<')
5600 mode = TYPE_MODE (TREE_TYPE (exp));
5601 if (GET_MODE_CLASS (mode) == MODE_INT
5602 && mode > MAX_INTEGER_COMPUTATION_MODE)
5603 fatal ("unsupported wide integer operation");
5606 /* Check operand of a unary op. */
5607 if (TREE_CODE_CLASS (code) == '1')
5609 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5610 if (GET_MODE_CLASS (mode) == MODE_INT
5611 && mode > MAX_INTEGER_COMPUTATION_MODE)
5612 fatal ("unsupported wide integer operation");
5615 /* Check operands of a binary/comparison op. */
5616 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5618 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5619 if (GET_MODE_CLASS (mode) == MODE_INT
5620 && mode > MAX_INTEGER_COMPUTATION_MODE)
5621 fatal ("unsupported wide integer operation");
5623 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5624 if (GET_MODE_CLASS (mode) == MODE_INT
5625 && mode > MAX_INTEGER_COMPUTATION_MODE)
5626 fatal ("unsupported wide integer operation");
5632 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5633 has any readonly fields. If any of the fields have types that
5634 contain readonly fields, return true as well. */
5637 readonly_fields_p (type)
5642 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5643 if (TREE_CODE (field) == FIELD_DECL
5644 && (TREE_READONLY (field)
5645 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5646 && readonly_fields_p (TREE_TYPE (field)))))
5652 /* expand_expr: generate code for computing expression EXP.
5653 An rtx for the computed value is returned. The value is never null.
5654 In the case of a void EXP, const0_rtx is returned.
5656 The value may be stored in TARGET if TARGET is nonzero.
5657 TARGET is just a suggestion; callers must assume that
5658 the rtx returned may not be the same as TARGET.
5660 If TARGET is CONST0_RTX, it means that the value will be ignored.
5662 If TMODE is not VOIDmode, it suggests generating the
5663 result in mode TMODE. But this is done only when convenient.
5664 Otherwise, TMODE is ignored and the value generated in its natural mode.
5665 TMODE is just a suggestion; callers must assume that
5666 the rtx returned may not have mode TMODE.
5668 Note that TARGET may have neither TMODE nor MODE. In that case, it
5669 probably will not be used.
5671 If MODIFIER is EXPAND_SUM then when EXP is an addition
5672 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5673 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5674 products as above, or REG or MEM, or constant.
5675 Ordinarily in such cases we would output mul or add instructions
5676 and then return a pseudo reg containing the sum.
5678 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5679 it also marks a label as absolutely required (it can't be dead).
5680 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5681 This is used for outputting expressions used in initializers.
5683 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5684 with a constant address even if that address is not normally legitimate.
5685 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5688 expand_expr (exp, target, tmode, modifier)
5691 enum machine_mode tmode;
5692 enum expand_modifier modifier;
5694 register rtx op0, op1, temp;
5695 tree type = TREE_TYPE (exp);
5696 int unsignedp = TREE_UNSIGNED (type);
5697 register enum machine_mode mode;
5698 register enum tree_code code = TREE_CODE (exp);
5700 rtx subtarget, original_target;
5703 /* Used by check-memory-usage to make modifier read only. */
5704 enum expand_modifier ro_modifier;
5706 /* Handle ERROR_MARK before anybody tries to access its type. */
5707 if (TREE_CODE (exp) == ERROR_MARK)
5709 op0 = CONST0_RTX (tmode);
5715 mode = TYPE_MODE (type);
5716 /* Use subtarget as the target for operand 0 of a binary operation. */
5717 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5718 original_target = target;
5719 ignore = (target == const0_rtx
5720 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5721 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5722 || code == COND_EXPR)
5723 && TREE_CODE (type) == VOID_TYPE));
5725 /* Make a read-only version of the modifier. */
5726 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5727 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5728 ro_modifier = modifier;
5730 ro_modifier = EXPAND_NORMAL;
5732 /* Don't use hard regs as subtargets, because the combiner
5733 can only handle pseudo regs. */
5734 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5736 /* Avoid subtargets inside loops,
5737 since they hide some invariant expressions. */
5738 if (preserve_subexpressions_p ())
5741 /* If we are going to ignore this result, we need only do something
5742 if there is a side-effect somewhere in the expression. If there
5743 is, short-circuit the most common cases here. Note that we must
5744 not call expand_expr with anything but const0_rtx in case this
5745 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5749 if (! TREE_SIDE_EFFECTS (exp))
5752 /* Ensure we reference a volatile object even if value is ignored, but
5753 don't do this if all we are doing is taking its address. */
5754 if (TREE_THIS_VOLATILE (exp)
5755 && TREE_CODE (exp) != FUNCTION_DECL
5756 && mode != VOIDmode && mode != BLKmode
5757 && modifier != EXPAND_CONST_ADDRESS)
5759 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5760 if (GET_CODE (temp) == MEM)
5761 temp = copy_to_reg (temp);
5765 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5766 || code == INDIRECT_REF || code == BUFFER_REF)
5767 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5768 VOIDmode, ro_modifier);
5769 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5770 || code == ARRAY_REF)
5772 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5773 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5776 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5777 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5778 /* If the second operand has no side effects, just evaluate
5780 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5781 VOIDmode, ro_modifier);
5782 else if (code == BIT_FIELD_REF)
5784 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5785 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5786 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5793 #ifdef MAX_INTEGER_COMPUTATION_MODE
5794 /* Only check stuff here if the mode we want is different from the mode
5795 of the expression; if it's the same, check_max_integer_computiation_mode
5796 will handle it. Do we really need to check this stuff at all? */
5799 && GET_MODE (target) != mode
5800 && TREE_CODE (exp) != INTEGER_CST
5801 && TREE_CODE (exp) != PARM_DECL
5802 && TREE_CODE (exp) != ARRAY_REF
5803 && TREE_CODE (exp) != COMPONENT_REF
5804 && TREE_CODE (exp) != BIT_FIELD_REF
5805 && TREE_CODE (exp) != INDIRECT_REF
5806 && TREE_CODE (exp) != CALL_EXPR
5807 && TREE_CODE (exp) != VAR_DECL
5808 && TREE_CODE (exp) != RTL_EXPR)
5810 enum machine_mode mode = GET_MODE (target);
5812 if (GET_MODE_CLASS (mode) == MODE_INT
5813 && mode > MAX_INTEGER_COMPUTATION_MODE)
5814 fatal ("unsupported wide integer operation");
5818 && TREE_CODE (exp) != INTEGER_CST
5819 && TREE_CODE (exp) != PARM_DECL
5820 && TREE_CODE (exp) != ARRAY_REF
5821 && TREE_CODE (exp) != COMPONENT_REF
5822 && TREE_CODE (exp) != BIT_FIELD_REF
5823 && TREE_CODE (exp) != INDIRECT_REF
5824 && TREE_CODE (exp) != VAR_DECL
5825 && TREE_CODE (exp) != CALL_EXPR
5826 && TREE_CODE (exp) != RTL_EXPR
5827 && GET_MODE_CLASS (tmode) == MODE_INT
5828 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5829 fatal ("unsupported wide integer operation");
5831 check_max_integer_computation_mode (exp);
5834 /* If will do cse, generate all results into pseudo registers
5835 since 1) that allows cse to find more things
5836 and 2) otherwise cse could produce an insn the machine
5839 if (! cse_not_expected && mode != BLKmode && target
5840 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5847 tree function = decl_function_context (exp);
5848 /* Handle using a label in a containing function. */
5849 if (function != current_function_decl
5850 && function != inline_function_decl && function != 0)
5852 struct function *p = find_function_data (function);
5853 /* Allocate in the memory associated with the function
5854 that the label is in. */
5855 push_obstacks (p->function_obstack,
5856 p->function_maybepermanent_obstack);
5858 p->expr->x_forced_labels
5859 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5860 p->expr->x_forced_labels);
5865 if (modifier == EXPAND_INITIALIZER)
5866 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5871 temp = gen_rtx_MEM (FUNCTION_MODE,
5872 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5873 if (function != current_function_decl
5874 && function != inline_function_decl && function != 0)
5875 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5880 if (DECL_RTL (exp) == 0)
5882 error_with_decl (exp, "prior parameter's size depends on `%s'");
5883 return CONST0_RTX (mode);
5886 /* ... fall through ... */
5889 /* If a static var's type was incomplete when the decl was written,
5890 but the type is complete now, lay out the decl now. */
5891 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5892 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5894 push_obstacks_nochange ();
5895 end_temporary_allocation ();
5896 layout_decl (exp, 0);
5897 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5901 /* Although static-storage variables start off initialized, according to
5902 ANSI C, a memcpy could overwrite them with uninitialized values. So
5903 we check them too. This also lets us check for read-only variables
5904 accessed via a non-const declaration, in case it won't be detected
5905 any other way (e.g., in an embedded system or OS kernel without
5908 Aggregates are not checked here; they're handled elsewhere. */
5909 if (cfun && current_function_check_memory_usage
5911 && GET_CODE (DECL_RTL (exp)) == MEM
5912 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5914 enum memory_use_mode memory_usage;
5915 memory_usage = get_memory_usage_from_modifier (modifier);
5917 if (memory_usage != MEMORY_USE_DONT)
5918 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5919 XEXP (DECL_RTL (exp), 0), Pmode,
5920 GEN_INT (int_size_in_bytes (type)),
5921 TYPE_MODE (sizetype),
5922 GEN_INT (memory_usage),
5923 TYPE_MODE (integer_type_node));
5926 /* ... fall through ... */
5930 if (DECL_RTL (exp) == 0)
5933 /* Ensure variable marked as used even if it doesn't go through
5934 a parser. If it hasn't be used yet, write out an external
5936 if (! TREE_USED (exp))
5938 assemble_external (exp);
5939 TREE_USED (exp) = 1;
5942 /* Show we haven't gotten RTL for this yet. */
5945 /* Handle variables inherited from containing functions. */
5946 context = decl_function_context (exp);
5948 /* We treat inline_function_decl as an alias for the current function
5949 because that is the inline function whose vars, types, etc.
5950 are being merged into the current function.
5951 See expand_inline_function. */
5953 if (context != 0 && context != current_function_decl
5954 && context != inline_function_decl
5955 /* If var is static, we don't need a static chain to access it. */
5956 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5957 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5961 /* Mark as non-local and addressable. */
5962 DECL_NONLOCAL (exp) = 1;
5963 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5965 mark_addressable (exp);
5966 if (GET_CODE (DECL_RTL (exp)) != MEM)
5968 addr = XEXP (DECL_RTL (exp), 0);
5969 if (GET_CODE (addr) == MEM)
5970 addr = gen_rtx_MEM (Pmode,
5971 fix_lexical_addr (XEXP (addr, 0), exp));
5973 addr = fix_lexical_addr (addr, exp);
5974 temp = change_address (DECL_RTL (exp), mode, addr);
5977 /* This is the case of an array whose size is to be determined
5978 from its initializer, while the initializer is still being parsed.
5981 else if (GET_CODE (DECL_RTL (exp)) == MEM
5982 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5983 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5984 XEXP (DECL_RTL (exp), 0));
5986 /* If DECL_RTL is memory, we are in the normal case and either
5987 the address is not valid or it is not a register and -fforce-addr
5988 is specified, get the address into a register. */
5990 else if (GET_CODE (DECL_RTL (exp)) == MEM
5991 && modifier != EXPAND_CONST_ADDRESS
5992 && modifier != EXPAND_SUM
5993 && modifier != EXPAND_INITIALIZER
5994 && (! memory_address_p (DECL_MODE (exp),
5995 XEXP (DECL_RTL (exp), 0))
5997 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5998 temp = change_address (DECL_RTL (exp), VOIDmode,
5999 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6001 /* If we got something, return it. But first, set the alignment
6002 the address is a register. */
6005 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6006 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6011 /* If the mode of DECL_RTL does not match that of the decl, it
6012 must be a promoted value. We return a SUBREG of the wanted mode,
6013 but mark it so that we know that it was already extended. */
6015 if (GET_CODE (DECL_RTL (exp)) == REG
6016 && GET_MODE (DECL_RTL (exp)) != mode)
6018 /* Get the signedness used for this variable. Ensure we get the
6019 same mode we got when the variable was declared. */
6020 if (GET_MODE (DECL_RTL (exp))
6021 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6024 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6025 SUBREG_PROMOTED_VAR_P (temp) = 1;
6026 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6030 return DECL_RTL (exp);
6033 return immed_double_const (TREE_INT_CST_LOW (exp),
6034 TREE_INT_CST_HIGH (exp), mode);
6037 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6038 EXPAND_MEMORY_USE_BAD);
6041 /* If optimized, generate immediate CONST_DOUBLE
6042 which will be turned into memory by reload if necessary.
6044 We used to force a register so that loop.c could see it. But
6045 this does not allow gen_* patterns to perform optimizations with
6046 the constants. It also produces two insns in cases like "x = 1.0;".
6047 On most machines, floating-point constants are not permitted in
6048 many insns, so we'd end up copying it to a register in any case.
6050 Now, we do the copying in expand_binop, if appropriate. */
6051 return immed_real_const (exp);
6055 if (! TREE_CST_RTL (exp))
6056 output_constant_def (exp);
6058 /* TREE_CST_RTL probably contains a constant address.
6059 On RISC machines where a constant address isn't valid,
6060 make some insns to get that address into a register. */
6061 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6062 && modifier != EXPAND_CONST_ADDRESS
6063 && modifier != EXPAND_INITIALIZER
6064 && modifier != EXPAND_SUM
6065 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6067 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6068 return change_address (TREE_CST_RTL (exp), VOIDmode,
6069 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6070 return TREE_CST_RTL (exp);
6072 case EXPR_WITH_FILE_LOCATION:
6075 char *saved_input_filename = input_filename;
6076 int saved_lineno = lineno;
6077 input_filename = EXPR_WFL_FILENAME (exp);
6078 lineno = EXPR_WFL_LINENO (exp);
6079 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6080 emit_line_note (input_filename, lineno);
6081 /* Possibly avoid switching back and force here */
6082 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6083 input_filename = saved_input_filename;
6084 lineno = saved_lineno;
6089 context = decl_function_context (exp);
6091 /* If this SAVE_EXPR was at global context, assume we are an
6092 initialization function and move it into our context. */
6094 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6096 /* We treat inline_function_decl as an alias for the current function
6097 because that is the inline function whose vars, types, etc.
6098 are being merged into the current function.
6099 See expand_inline_function. */
6100 if (context == current_function_decl || context == inline_function_decl)
6103 /* If this is non-local, handle it. */
6106 /* The following call just exists to abort if the context is
6107 not of a containing function. */
6108 find_function_data (context);
6110 temp = SAVE_EXPR_RTL (exp);
6111 if (temp && GET_CODE (temp) == REG)
6113 put_var_into_stack (exp);
6114 temp = SAVE_EXPR_RTL (exp);
6116 if (temp == 0 || GET_CODE (temp) != MEM)
6118 return change_address (temp, mode,
6119 fix_lexical_addr (XEXP (temp, 0), exp));
6121 if (SAVE_EXPR_RTL (exp) == 0)
6123 if (mode == VOIDmode)
6126 temp = assign_temp (type, 3, 0, 0);
6128 SAVE_EXPR_RTL (exp) = temp;
6129 if (!optimize && GET_CODE (temp) == REG)
6130 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6133 /* If the mode of TEMP does not match that of the expression, it
6134 must be a promoted value. We pass store_expr a SUBREG of the
6135 wanted mode but mark it so that we know that it was already
6136 extended. Note that `unsignedp' was modified above in
6139 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6141 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6142 SUBREG_PROMOTED_VAR_P (temp) = 1;
6143 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6146 if (temp == const0_rtx)
6147 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6148 EXPAND_MEMORY_USE_BAD);
6150 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6152 TREE_USED (exp) = 1;
6155 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6156 must be a promoted value. We return a SUBREG of the wanted mode,
6157 but mark it so that we know that it was already extended. */
6159 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6160 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6162 /* Compute the signedness and make the proper SUBREG. */
6163 promote_mode (type, mode, &unsignedp, 0);
6164 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6165 SUBREG_PROMOTED_VAR_P (temp) = 1;
6166 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6170 return SAVE_EXPR_RTL (exp);
6175 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6176 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6180 case PLACEHOLDER_EXPR:
6182 tree placeholder_expr;
6184 /* If there is an object on the head of the placeholder list,
6185 see if some object in it of type TYPE or a pointer to it. For
6186 further information, see tree.def. */
6187 for (placeholder_expr = placeholder_list;
6188 placeholder_expr != 0;
6189 placeholder_expr = TREE_CHAIN (placeholder_expr))
6191 tree need_type = TYPE_MAIN_VARIANT (type);
6193 tree old_list = placeholder_list;
6196 /* Find the outermost reference that is of the type we want.
6197 If none, see if any object has a type that is a pointer to
6198 the type we want. */
6199 for (elt = TREE_PURPOSE (placeholder_expr);
6200 elt != 0 && object == 0;
6202 = ((TREE_CODE (elt) == COMPOUND_EXPR
6203 || TREE_CODE (elt) == COND_EXPR)
6204 ? TREE_OPERAND (elt, 1)
6205 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6206 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6207 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6208 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6209 ? TREE_OPERAND (elt, 0) : 0))
6210 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6213 for (elt = TREE_PURPOSE (placeholder_expr);
6214 elt != 0 && object == 0;
6216 = ((TREE_CODE (elt) == COMPOUND_EXPR
6217 || TREE_CODE (elt) == COND_EXPR)
6218 ? TREE_OPERAND (elt, 1)
6219 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6220 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6221 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6222 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6223 ? TREE_OPERAND (elt, 0) : 0))
6224 if (POINTER_TYPE_P (TREE_TYPE (elt))
6225 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6227 object = build1 (INDIRECT_REF, need_type, elt);
6231 /* Expand this object skipping the list entries before
6232 it was found in case it is also a PLACEHOLDER_EXPR.
6233 In that case, we want to translate it using subsequent
6235 placeholder_list = TREE_CHAIN (placeholder_expr);
6236 temp = expand_expr (object, original_target, tmode,
6238 placeholder_list = old_list;
6244 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6247 case WITH_RECORD_EXPR:
6248 /* Put the object on the placeholder list, expand our first operand,
6249 and pop the list. */
6250 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6252 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6253 tmode, ro_modifier);
6254 placeholder_list = TREE_CHAIN (placeholder_list);
6258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6259 expand_goto (TREE_OPERAND (exp, 0));
6261 expand_computed_goto (TREE_OPERAND (exp, 0));
6265 expand_exit_loop_if_false (NULL_PTR,
6266 invert_truthvalue (TREE_OPERAND (exp, 0)));
6269 case LABELED_BLOCK_EXPR:
6270 if (LABELED_BLOCK_BODY (exp))
6271 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6272 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6275 case EXIT_BLOCK_EXPR:
6276 if (EXIT_BLOCK_RETURN (exp))
6277 sorry ("returned value in block_exit_expr");
6278 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6283 expand_start_loop (1);
6284 expand_expr_stmt (TREE_OPERAND (exp, 0));
6292 tree vars = TREE_OPERAND (exp, 0);
6293 int vars_need_expansion = 0;
6295 /* Need to open a binding contour here because
6296 if there are any cleanups they must be contained here. */
6297 expand_start_bindings (2);
6299 /* Mark the corresponding BLOCK for output in its proper place. */
6300 if (TREE_OPERAND (exp, 2) != 0
6301 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6302 insert_block (TREE_OPERAND (exp, 2));
6304 /* If VARS have not yet been expanded, expand them now. */
6307 if (DECL_RTL (vars) == 0)
6309 vars_need_expansion = 1;
6312 expand_decl_init (vars);
6313 vars = TREE_CHAIN (vars);
6316 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6318 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6324 if (RTL_EXPR_SEQUENCE (exp))
6326 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6328 emit_insns (RTL_EXPR_SEQUENCE (exp));
6329 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6331 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6332 free_temps_for_rtl_expr (exp);
6333 return RTL_EXPR_RTL (exp);
6336 /* If we don't need the result, just ensure we evaluate any
6341 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6342 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6343 EXPAND_MEMORY_USE_BAD);
6347 /* All elts simple constants => refer to a constant in memory. But
6348 if this is a non-BLKmode mode, let it store a field at a time
6349 since that should make a CONST_INT or CONST_DOUBLE when we
6350 fold. Likewise, if we have a target we can use, it is best to
6351 store directly into the target unless the type is large enough
6352 that memcpy will be used. If we are making an initializer and
6353 all operands are constant, put it in memory as well. */
6354 else if ((TREE_STATIC (exp)
6355 && ((mode == BLKmode
6356 && ! (target != 0 && safe_from_p (target, exp, 1)))
6357 || TREE_ADDRESSABLE (exp)
6358 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6359 && (! MOVE_BY_PIECES_P
6360 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6362 && ! mostly_zeros_p (exp))))
6363 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6365 rtx constructor = output_constant_def (exp);
6367 if (modifier != EXPAND_CONST_ADDRESS
6368 && modifier != EXPAND_INITIALIZER
6369 && modifier != EXPAND_SUM
6370 && (! memory_address_p (GET_MODE (constructor),
6371 XEXP (constructor, 0))
6373 && GET_CODE (XEXP (constructor, 0)) != REG)))
6374 constructor = change_address (constructor, VOIDmode,
6375 XEXP (constructor, 0));
6381 /* Handle calls that pass values in multiple non-contiguous
6382 locations. The Irix 6 ABI has examples of this. */
6383 if (target == 0 || ! safe_from_p (target, exp, 1)
6384 || GET_CODE (target) == PARALLEL)
6386 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6387 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6389 target = assign_temp (type, 0, 1, 1);
6392 if (TREE_READONLY (exp))
6394 if (GET_CODE (target) == MEM)
6395 target = copy_rtx (target);
6397 RTX_UNCHANGING_P (target) = 1;
6400 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6401 int_size_in_bytes (TREE_TYPE (exp)));
6407 tree exp1 = TREE_OPERAND (exp, 0);
6410 tree string = string_constant (exp1, &index);
6412 /* Try to optimize reads from const strings. */
6414 && TREE_CODE (string) == STRING_CST
6415 && TREE_CODE (index) == INTEGER_CST
6416 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6417 && GET_MODE_CLASS (mode) == MODE_INT
6418 && GET_MODE_SIZE (mode) == 1
6419 && modifier != EXPAND_MEMORY_USE_WO)
6421 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6423 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6424 op0 = memory_address (mode, op0);
6426 if (cfun && current_function_check_memory_usage
6427 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6429 enum memory_use_mode memory_usage;
6430 memory_usage = get_memory_usage_from_modifier (modifier);
6432 if (memory_usage != MEMORY_USE_DONT)
6434 in_check_memory_usage = 1;
6435 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6437 GEN_INT (int_size_in_bytes (type)),
6438 TYPE_MODE (sizetype),
6439 GEN_INT (memory_usage),
6440 TYPE_MODE (integer_type_node));
6441 in_check_memory_usage = 0;
6445 temp = gen_rtx_MEM (mode, op0);
6446 /* If address was computed by addition,
6447 mark this as an element of an aggregate. */
6448 if (TREE_CODE (exp1) == PLUS_EXPR
6449 || (TREE_CODE (exp1) == SAVE_EXPR
6450 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6451 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6452 || (TREE_CODE (exp1) == ADDR_EXPR
6453 && (exp2 = TREE_OPERAND (exp1, 0))
6454 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6455 MEM_SET_IN_STRUCT_P (temp, 1);
6457 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6458 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6460 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6461 here, because, in C and C++, the fact that a location is accessed
6462 through a pointer to const does not mean that the value there can
6463 never change. Languages where it can never change should
6464 also set TREE_STATIC. */
6465 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6467 /* If we are writing to this object and its type is a record with
6468 readonly fields, we must mark it as readonly so it will
6469 conflict with readonly references to those fields. */
6470 if (modifier == EXPAND_MEMORY_USE_WO
6471 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6472 RTX_UNCHANGING_P (temp) = 1;
6478 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6482 tree array = TREE_OPERAND (exp, 0);
6483 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6484 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6485 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6488 /* Optimize the special-case of a zero lower bound.
6490 We convert the low_bound to sizetype to avoid some problems
6491 with constant folding. (E.g. suppose the lower bound is 1,
6492 and its mode is QI. Without the conversion, (ARRAY
6493 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6494 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6496 if (! integer_zerop (low_bound))
6497 index = size_diffop (index, convert (sizetype, low_bound));
6499 /* Fold an expression like: "foo"[2].
6500 This is not done in fold so it won't happen inside &.
6501 Don't fold if this is for wide characters since it's too
6502 difficult to do correctly and this is a very rare case. */
6504 if (TREE_CODE (array) == STRING_CST
6505 && TREE_CODE (index) == INTEGER_CST
6506 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6507 && GET_MODE_CLASS (mode) == MODE_INT
6508 && GET_MODE_SIZE (mode) == 1)
6510 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6512 /* If this is a constant index into a constant array,
6513 just get the value from the array. Handle both the cases when
6514 we have an explicit constructor and when our operand is a variable
6515 that was declared const. */
6517 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6518 && TREE_CODE (index) == INTEGER_CST
6519 && 0 > compare_tree_int (index,
6520 list_length (CONSTRUCTOR_ELTS
6521 (TREE_OPERAND (exp, 0)))))
6525 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6526 i = TREE_INT_CST_LOW (index);
6527 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6531 return expand_expr (fold (TREE_VALUE (elem)), target,
6532 tmode, ro_modifier);
6535 else if (optimize >= 1
6536 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6537 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6538 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6540 if (TREE_CODE (index) == INTEGER_CST)
6542 tree init = DECL_INITIAL (array);
6544 if (TREE_CODE (init) == CONSTRUCTOR)
6548 for (elem = CONSTRUCTOR_ELTS (init);
6550 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6551 elem = TREE_CHAIN (elem))
6555 return expand_expr (fold (TREE_VALUE (elem)), target,
6556 tmode, ro_modifier);
6558 else if (TREE_CODE (init) == STRING_CST
6559 && 0 > compare_tree_int (index,
6560 TREE_STRING_LENGTH (init)))
6562 (TREE_STRING_POINTER
6563 (init)[TREE_INT_CST_LOW (index)]));
6568 /* ... fall through ... */
6572 /* If the operand is a CONSTRUCTOR, we can just extract the
6573 appropriate field if it is present. Don't do this if we have
6574 already written the data since we want to refer to that copy
6575 and varasm.c assumes that's what we'll do. */
6576 if (code != ARRAY_REF
6577 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6578 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6582 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6583 elt = TREE_CHAIN (elt))
6584 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6585 /* We can normally use the value of the field in the
6586 CONSTRUCTOR. However, if this is a bitfield in
6587 an integral mode that we can fit in a HOST_WIDE_INT,
6588 we must mask only the number of bits in the bitfield,
6589 since this is done implicitly by the constructor. If
6590 the bitfield does not meet either of those conditions,
6591 we can't do this optimization. */
6592 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6593 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6595 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6596 <= HOST_BITS_PER_WIDE_INT))))
6598 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6599 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6601 HOST_WIDE_INT bitsize
6602 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6604 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6606 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6607 op0 = expand_and (op0, op1, target);
6611 enum machine_mode imode
6612 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6614 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6617 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6619 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6629 enum machine_mode mode1;
6630 HOST_WIDE_INT bitsize, bitpos;
6633 unsigned int alignment;
6634 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6635 &mode1, &unsignedp, &volatilep,
6638 /* If we got back the original object, something is wrong. Perhaps
6639 we are evaluating an expression too early. In any event, don't
6640 infinitely recurse. */
6644 /* If TEM's type is a union of variable size, pass TARGET to the inner
6645 computation, since it will need a temporary and TARGET is known
6646 to have to do. This occurs in unchecked conversion in Ada. */
6648 op0 = expand_expr (tem,
6649 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6650 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6652 ? target : NULL_RTX),
6654 (modifier == EXPAND_INITIALIZER
6655 || modifier == EXPAND_CONST_ADDRESS)
6656 ? modifier : EXPAND_NORMAL);
6658 /* If this is a constant, put it into a register if it is a
6659 legitimate constant and OFFSET is 0 and memory if it isn't. */
6660 if (CONSTANT_P (op0))
6662 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6663 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6665 op0 = force_reg (mode, op0);
6667 op0 = validize_mem (force_const_mem (mode, op0));
6672 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6674 /* If this object is in memory, put it into a register.
6675 This case can't occur in C, but can in Ada if we have
6676 unchecked conversion of an expression from a scalar type to
6677 an array or record type. */
6678 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6679 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6681 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6683 mark_temp_addr_taken (memloc);
6684 emit_move_insn (memloc, op0);
6688 if (GET_CODE (op0) != MEM)
6691 if (GET_MODE (offset_rtx) != ptr_mode)
6693 #ifdef POINTERS_EXTEND_UNSIGNED
6694 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6696 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6700 /* A constant address in OP0 can have VOIDmode, we must not try
6701 to call force_reg for that case. Avoid that case. */
6702 if (GET_CODE (op0) == MEM
6703 && GET_MODE (op0) == BLKmode
6704 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6706 && (bitpos % bitsize) == 0
6707 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6708 && alignment == GET_MODE_ALIGNMENT (mode1))
6710 rtx temp = change_address (op0, mode1,
6711 plus_constant (XEXP (op0, 0),
6714 if (GET_CODE (XEXP (temp, 0)) == REG)
6717 op0 = change_address (op0, mode1,
6718 force_reg (GET_MODE (XEXP (temp, 0)),
6724 op0 = change_address (op0, VOIDmode,
6725 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6726 force_reg (ptr_mode,
6730 /* Don't forget about volatility even if this is a bitfield. */
6731 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6733 op0 = copy_rtx (op0);
6734 MEM_VOLATILE_P (op0) = 1;
6737 /* Check the access. */
6738 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6740 enum memory_use_mode memory_usage;
6741 memory_usage = get_memory_usage_from_modifier (modifier);
6743 if (memory_usage != MEMORY_USE_DONT)
6748 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6749 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6751 /* Check the access right of the pointer. */
6752 if (size > BITS_PER_UNIT)
6753 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6755 GEN_INT (size / BITS_PER_UNIT),
6756 TYPE_MODE (sizetype),
6757 GEN_INT (memory_usage),
6758 TYPE_MODE (integer_type_node));
6762 /* In cases where an aligned union has an unaligned object
6763 as a field, we might be extracting a BLKmode value from
6764 an integer-mode (e.g., SImode) object. Handle this case
6765 by doing the extract into an object as wide as the field
6766 (which we know to be the width of a basic mode), then
6767 storing into memory, and changing the mode to BLKmode.
6768 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6769 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6770 if (mode1 == VOIDmode
6771 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6772 || (modifier != EXPAND_CONST_ADDRESS
6773 && modifier != EXPAND_INITIALIZER
6774 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6775 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6776 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6777 /* If the field isn't aligned enough to fetch as a memref,
6778 fetch it as a bit field. */
6779 || (mode1 != BLKmode
6780 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6781 && ((TYPE_ALIGN (TREE_TYPE (tem))
6782 < GET_MODE_ALIGNMENT (mode))
6783 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6784 /* If the type and the field are a constant size and the
6785 size of the type isn't the same size as the bitfield,
6786 we must use bitfield operations. */
6788 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6790 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6792 || (modifier != EXPAND_CONST_ADDRESS
6793 && modifier != EXPAND_INITIALIZER
6795 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6796 && (TYPE_ALIGN (type) > alignment
6797 || bitpos % TYPE_ALIGN (type) != 0)))
6799 enum machine_mode ext_mode = mode;
6801 if (ext_mode == BLKmode
6802 && ! (target != 0 && GET_CODE (op0) == MEM
6803 && GET_CODE (target) == MEM
6804 && bitpos % BITS_PER_UNIT == 0))
6805 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6807 if (ext_mode == BLKmode)
6809 /* In this case, BITPOS must start at a byte boundary and
6810 TARGET, if specified, must be a MEM. */
6811 if (GET_CODE (op0) != MEM
6812 || (target != 0 && GET_CODE (target) != MEM)
6813 || bitpos % BITS_PER_UNIT != 0)
6816 op0 = change_address (op0, VOIDmode,
6817 plus_constant (XEXP (op0, 0),
6818 bitpos / BITS_PER_UNIT));
6820 target = assign_temp (type, 0, 1, 1);
6822 emit_block_move (target, op0,
6823 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6830 op0 = validize_mem (op0);
6832 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6833 mark_reg_pointer (XEXP (op0, 0), alignment);
6835 op0 = extract_bit_field (op0, bitsize, bitpos,
6836 unsignedp, target, ext_mode, ext_mode,
6838 int_size_in_bytes (TREE_TYPE (tem)));
6840 /* If the result is a record type and BITSIZE is narrower than
6841 the mode of OP0, an integral mode, and this is a big endian
6842 machine, we must put the field into the high-order bits. */
6843 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6844 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6845 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6846 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6847 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6851 if (mode == BLKmode)
6853 rtx new = assign_stack_temp (ext_mode,
6854 bitsize / BITS_PER_UNIT, 0);
6856 emit_move_insn (new, op0);
6857 op0 = copy_rtx (new);
6858 PUT_MODE (op0, BLKmode);
6859 MEM_SET_IN_STRUCT_P (op0, 1);
6865 /* If the result is BLKmode, use that to access the object
6867 if (mode == BLKmode)
6870 /* Get a reference to just this component. */
6871 if (modifier == EXPAND_CONST_ADDRESS
6872 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6873 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6874 (bitpos / BITS_PER_UNIT)));
6876 op0 = change_address (op0, mode1,
6877 plus_constant (XEXP (op0, 0),
6878 (bitpos / BITS_PER_UNIT)));
6880 if (GET_CODE (op0) == MEM)
6881 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6883 if (GET_CODE (XEXP (op0, 0)) == REG)
6884 mark_reg_pointer (XEXP (op0, 0), alignment);
6886 MEM_SET_IN_STRUCT_P (op0, 1);
6887 MEM_VOLATILE_P (op0) |= volatilep;
6888 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6889 || modifier == EXPAND_CONST_ADDRESS
6890 || modifier == EXPAND_INITIALIZER)
6892 else if (target == 0)
6893 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6895 convert_move (target, op0, unsignedp);
6899 /* Intended for a reference to a buffer of a file-object in Pascal.
6900 But it's not certain that a special tree code will really be
6901 necessary for these. INDIRECT_REF might work for them. */
6907 /* Pascal set IN expression.
6910 rlo = set_low - (set_low%bits_per_word);
6911 the_word = set [ (index - rlo)/bits_per_word ];
6912 bit_index = index % bits_per_word;
6913 bitmask = 1 << bit_index;
6914 return !!(the_word & bitmask); */
6916 tree set = TREE_OPERAND (exp, 0);
6917 tree index = TREE_OPERAND (exp, 1);
6918 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6919 tree set_type = TREE_TYPE (set);
6920 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6921 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6922 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6923 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6924 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6925 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6926 rtx setaddr = XEXP (setval, 0);
6927 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6929 rtx diff, quo, rem, addr, bit, result;
6931 preexpand_calls (exp);
6933 /* If domain is empty, answer is no. Likewise if index is constant
6934 and out of bounds. */
6935 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6936 && TREE_CODE (set_low_bound) == INTEGER_CST
6937 && tree_int_cst_lt (set_high_bound, set_low_bound))
6938 || (TREE_CODE (index) == INTEGER_CST
6939 && TREE_CODE (set_low_bound) == INTEGER_CST
6940 && tree_int_cst_lt (index, set_low_bound))
6941 || (TREE_CODE (set_high_bound) == INTEGER_CST
6942 && TREE_CODE (index) == INTEGER_CST
6943 && tree_int_cst_lt (set_high_bound, index))))
6947 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6949 /* If we get here, we have to generate the code for both cases
6950 (in range and out of range). */
6952 op0 = gen_label_rtx ();
6953 op1 = gen_label_rtx ();
6955 if (! (GET_CODE (index_val) == CONST_INT
6956 && GET_CODE (lo_r) == CONST_INT))
6958 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6959 GET_MODE (index_val), iunsignedp, 0, op1);
6962 if (! (GET_CODE (index_val) == CONST_INT
6963 && GET_CODE (hi_r) == CONST_INT))
6965 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6966 GET_MODE (index_val), iunsignedp, 0, op1);
6969 /* Calculate the element number of bit zero in the first word
6971 if (GET_CODE (lo_r) == CONST_INT)
6972 rlow = GEN_INT (INTVAL (lo_r)
6973 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6975 rlow = expand_binop (index_mode, and_optab, lo_r,
6976 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6977 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6979 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6980 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6982 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6983 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6984 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6985 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6987 addr = memory_address (byte_mode,
6988 expand_binop (index_mode, add_optab, diff,
6989 setaddr, NULL_RTX, iunsignedp,
6992 /* Extract the bit we want to examine */
6993 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6994 gen_rtx_MEM (byte_mode, addr),
6995 make_tree (TREE_TYPE (index), rem),
6997 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6998 GET_MODE (target) == byte_mode ? target : 0,
6999 1, OPTAB_LIB_WIDEN);
7001 if (result != target)
7002 convert_move (target, result, 1);
7004 /* Output the code to handle the out-of-range case. */
7007 emit_move_insn (target, const0_rtx);
7012 case WITH_CLEANUP_EXPR:
7013 if (RTL_EXPR_RTL (exp) == 0)
7016 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7017 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7019 /* That's it for this cleanup. */
7020 TREE_OPERAND (exp, 2) = 0;
7022 return RTL_EXPR_RTL (exp);
7024 case CLEANUP_POINT_EXPR:
7026 /* Start a new binding layer that will keep track of all cleanup
7027 actions to be performed. */
7028 expand_start_bindings (2);
7030 target_temp_slot_level = temp_slot_level;
7032 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7033 /* If we're going to use this value, load it up now. */
7035 op0 = force_not_mem (op0);
7036 preserve_temp_slots (op0);
7037 expand_end_bindings (NULL_TREE, 0, 0);
7042 /* Check for a built-in function. */
7043 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7044 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7046 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7047 return expand_builtin (exp, target, subtarget, tmode, ignore);
7049 /* If this call was expanded already by preexpand_calls,
7050 just return the result we got. */
7051 if (CALL_EXPR_RTL (exp) != 0)
7052 return CALL_EXPR_RTL (exp);
7054 return expand_call (exp, target, ignore);
7056 case NON_LVALUE_EXPR:
7059 case REFERENCE_EXPR:
7060 if (TREE_CODE (type) == UNION_TYPE)
7062 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7064 /* If both input and output are BLKmode, this conversion
7065 isn't actually doing anything unless we need to make the
7066 alignment stricter. */
7067 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7068 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7069 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7070 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7075 if (mode != BLKmode)
7076 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7078 target = assign_temp (type, 0, 1, 1);
7081 if (GET_CODE (target) == MEM)
7082 /* Store data into beginning of memory target. */
7083 store_expr (TREE_OPERAND (exp, 0),
7084 change_address (target, TYPE_MODE (valtype), 0), 0);
7086 else if (GET_CODE (target) == REG)
7087 /* Store this field into a union of the proper type. */
7088 store_field (target,
7089 MIN ((int_size_in_bytes (TREE_TYPE
7090 (TREE_OPERAND (exp, 0)))
7092 GET_MODE_BITSIZE (mode)),
7093 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7094 VOIDmode, 0, BITS_PER_UNIT,
7095 int_size_in_bytes (type), 0);
7099 /* Return the entire union. */
7103 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7105 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7108 /* If the signedness of the conversion differs and OP0 is
7109 a promoted SUBREG, clear that indication since we now
7110 have to do the proper extension. */
7111 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7112 && GET_CODE (op0) == SUBREG)
7113 SUBREG_PROMOTED_VAR_P (op0) = 0;
7118 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7119 if (GET_MODE (op0) == mode)
7122 /* If OP0 is a constant, just convert it into the proper mode. */
7123 if (CONSTANT_P (op0))
7125 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7126 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7128 if (modifier == EXPAND_INITIALIZER)
7129 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7133 convert_to_mode (mode, op0,
7134 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7136 convert_move (target, op0,
7137 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7141 /* We come here from MINUS_EXPR when the second operand is a
7144 this_optab = add_optab;
7146 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7147 something else, make sure we add the register to the constant and
7148 then to the other thing. This case can occur during strength
7149 reduction and doing it this way will produce better code if the
7150 frame pointer or argument pointer is eliminated.
7152 fold-const.c will ensure that the constant is always in the inner
7153 PLUS_EXPR, so the only case we need to do anything about is if
7154 sp, ap, or fp is our second argument, in which case we must swap
7155 the innermost first argument and our second argument. */
7157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7158 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7159 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7160 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7161 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7162 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7164 tree t = TREE_OPERAND (exp, 1);
7166 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7167 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7170 /* If the result is to be ptr_mode and we are adding an integer to
7171 something, we might be forming a constant. So try to use
7172 plus_constant. If it produces a sum and we can't accept it,
7173 use force_operand. This allows P = &ARR[const] to generate
7174 efficient code on machines where a SYMBOL_REF is not a valid
7177 If this is an EXPAND_SUM call, always return the sum. */
7178 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7179 || mode == ptr_mode)
7181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7183 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7187 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7189 /* Use immed_double_const to ensure that the constant is
7190 truncated according to the mode of OP1, then sign extended
7191 to a HOST_WIDE_INT. Using the constant directly can result
7192 in non-canonical RTL in a 64x32 cross compile. */
7194 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7196 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7197 op1 = plus_constant (op1, INTVAL (constant_part));
7198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 op1 = force_operand (op1, target);
7203 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7204 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7205 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7209 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7211 if (! CONSTANT_P (op0))
7213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7214 VOIDmode, modifier);
7215 /* Don't go to both_summands if modifier
7216 says it's not right to return a PLUS. */
7217 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7221 /* Use immed_double_const to ensure that the constant is
7222 truncated according to the mode of OP1, then sign extended
7223 to a HOST_WIDE_INT. Using the constant directly can result
7224 in non-canonical RTL in a 64x32 cross compile. */
7226 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7228 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7229 op0 = plus_constant (op0, INTVAL (constant_part));
7230 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7231 op0 = force_operand (op0, target);
7236 /* No sense saving up arithmetic to be done
7237 if it's all in the wrong mode to form part of an address.
7238 And force_operand won't know whether to sign-extend or
7240 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7241 || mode != ptr_mode)
7244 preexpand_calls (exp);
7245 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7252 /* Make sure any term that's a sum with a constant comes last. */
7253 if (GET_CODE (op0) == PLUS
7254 && CONSTANT_P (XEXP (op0, 1)))
7260 /* If adding to a sum including a constant,
7261 associate it to put the constant outside. */
7262 if (GET_CODE (op1) == PLUS
7263 && CONSTANT_P (XEXP (op1, 1)))
7265 rtx constant_term = const0_rtx;
7267 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7270 /* Ensure that MULT comes first if there is one. */
7271 else if (GET_CODE (op0) == MULT)
7272 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7274 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7276 /* Let's also eliminate constants from op0 if possible. */
7277 op0 = eliminate_constant_term (op0, &constant_term);
7279 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7280 their sum should be a constant. Form it into OP1, since the
7281 result we want will then be OP0 + OP1. */
7283 temp = simplify_binary_operation (PLUS, mode, constant_term,
7288 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7291 /* Put a constant term last and put a multiplication first. */
7292 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7293 temp = op1, op1 = op0, op0 = temp;
7295 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7296 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7299 /* For initializers, we are allowed to return a MINUS of two
7300 symbolic constants. Here we handle all cases when both operands
7302 /* Handle difference of two symbolic constants,
7303 for the sake of an initializer. */
7304 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7305 && really_constant_p (TREE_OPERAND (exp, 0))
7306 && really_constant_p (TREE_OPERAND (exp, 1)))
7308 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7309 VOIDmode, ro_modifier);
7310 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7311 VOIDmode, ro_modifier);
7313 /* If the last operand is a CONST_INT, use plus_constant of
7314 the negated constant. Else make the MINUS. */
7315 if (GET_CODE (op1) == CONST_INT)
7316 return plus_constant (op0, - INTVAL (op1));
7318 return gen_rtx_MINUS (mode, op0, op1);
7320 /* Convert A - const to A + (-const). */
7321 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7323 tree negated = fold (build1 (NEGATE_EXPR, type,
7324 TREE_OPERAND (exp, 1)));
7326 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7327 /* If we can't negate the constant in TYPE, leave it alone and
7328 expand_binop will negate it for us. We used to try to do it
7329 here in the signed version of TYPE, but that doesn't work
7330 on POINTER_TYPEs. */;
7333 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7337 this_optab = sub_optab;
7341 preexpand_calls (exp);
7342 /* If first operand is constant, swap them.
7343 Thus the following special case checks need only
7344 check the second operand. */
7345 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7347 register tree t1 = TREE_OPERAND (exp, 0);
7348 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7349 TREE_OPERAND (exp, 1) = t1;
7352 /* Attempt to return something suitable for generating an
7353 indexed address, for machines that support that. */
7355 if (modifier == EXPAND_SUM && mode == ptr_mode
7356 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7357 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7359 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7362 /* Apply distributive law if OP0 is x+c. */
7363 if (GET_CODE (op0) == PLUS
7364 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7369 (mode, XEXP (op0, 0),
7370 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7371 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7372 * INTVAL (XEXP (op0, 1))));
7374 if (GET_CODE (op0) != REG)
7375 op0 = force_operand (op0, NULL_RTX);
7376 if (GET_CODE (op0) != REG)
7377 op0 = copy_to_mode_reg (mode, op0);
7380 gen_rtx_MULT (mode, op0,
7381 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7384 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7387 /* Check for multiplying things that have been extended
7388 from a narrower type. If this machine supports multiplying
7389 in that narrower type with a result in the desired type,
7390 do it that way, and avoid the explicit type-conversion. */
7391 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7392 && TREE_CODE (type) == INTEGER_TYPE
7393 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7394 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7395 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7396 && int_fits_type_p (TREE_OPERAND (exp, 1),
7397 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7398 /* Don't use a widening multiply if a shift will do. */
7399 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7400 > HOST_BITS_PER_WIDE_INT)
7401 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7403 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7404 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7406 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7407 /* If both operands are extended, they must either both
7408 be zero-extended or both be sign-extended. */
7409 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7411 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7413 enum machine_mode innermode
7414 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7415 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7416 ? smul_widen_optab : umul_widen_optab);
7417 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7418 ? umul_widen_optab : smul_widen_optab);
7419 if (mode == GET_MODE_WIDER_MODE (innermode))
7421 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7423 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7424 NULL_RTX, VOIDmode, 0);
7425 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7429 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7430 NULL_RTX, VOIDmode, 0);
7433 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7434 && innermode == word_mode)
7437 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7438 NULL_RTX, VOIDmode, 0);
7439 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7443 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7444 NULL_RTX, VOIDmode, 0);
7445 temp = expand_binop (mode, other_optab, op0, op1, target,
7446 unsignedp, OPTAB_LIB_WIDEN);
7447 htem = expand_mult_highpart_adjust (innermode,
7448 gen_highpart (innermode, temp),
7450 gen_highpart (innermode, temp),
7452 emit_move_insn (gen_highpart (innermode, temp), htem);
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7458 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7459 return expand_mult (mode, op0, op1, target, unsignedp);
7461 case TRUNC_DIV_EXPR:
7462 case FLOOR_DIV_EXPR:
7464 case ROUND_DIV_EXPR:
7465 case EXACT_DIV_EXPR:
7466 preexpand_calls (exp);
7467 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7469 /* Possible optimization: compute the dividend with EXPAND_SUM
7470 then if the divisor is constant can optimize the case
7471 where some terms of the dividend have coeffs divisible by it. */
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7473 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7474 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7477 this_optab = flodiv_optab;
7480 case TRUNC_MOD_EXPR:
7481 case FLOOR_MOD_EXPR:
7483 case ROUND_MOD_EXPR:
7484 preexpand_calls (exp);
7485 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7488 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7489 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7491 case FIX_ROUND_EXPR:
7492 case FIX_FLOOR_EXPR:
7494 abort (); /* Not used for C. */
7496 case FIX_TRUNC_EXPR:
7497 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7499 target = gen_reg_rtx (mode);
7500 expand_fix (target, op0, unsignedp);
7504 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7506 target = gen_reg_rtx (mode);
7507 /* expand_float can't figure out what to do if FROM has VOIDmode.
7508 So give it the correct mode. With -O, cse will optimize this. */
7509 if (GET_MODE (op0) == VOIDmode)
7510 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7512 expand_float (target, op0,
7513 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7518 temp = expand_unop (mode, neg_optab, op0, target, 0);
7524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7526 /* Handle complex values specially. */
7527 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7528 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7529 return expand_complex_abs (mode, op0, target, unsignedp);
7531 /* Unsigned abs is simply the operand. Testing here means we don't
7532 risk generating incorrect code below. */
7533 if (TREE_UNSIGNED (type))
7536 return expand_abs (mode, op0, target,
7537 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7541 target = original_target;
7542 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7543 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7544 || GET_MODE (target) != mode
7545 || (GET_CODE (target) == REG
7546 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7547 target = gen_reg_rtx (mode);
7548 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7549 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7551 /* First try to do it with a special MIN or MAX instruction.
7552 If that does not win, use a conditional jump to select the proper
7554 this_optab = (TREE_UNSIGNED (type)
7555 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7556 : (code == MIN_EXPR ? smin_optab : smax_optab));
7558 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7563 /* At this point, a MEM target is no longer useful; we will get better
7566 if (GET_CODE (target) == MEM)
7567 target = gen_reg_rtx (mode);
7570 emit_move_insn (target, op0);
7572 op0 = gen_label_rtx ();
7574 /* If this mode is an integer too wide to compare properly,
7575 compare word by word. Rely on cse to optimize constant cases. */
7576 if (GET_MODE_CLASS (mode) == MODE_INT
7577 && ! can_compare_p (GE, mode, ccp_jump))
7579 if (code == MAX_EXPR)
7580 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7581 target, op1, NULL_RTX, op0);
7583 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7584 op1, target, NULL_RTX, op0);
7588 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7589 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7590 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7593 emit_move_insn (target, op1);
7598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7599 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7606 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7611 /* ??? Can optimize bitwise operations with one arg constant.
7612 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7613 and (a bitwise1 b) bitwise2 b (etc)
7614 but that is probably not worth while. */
7616 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7617 boolean values when we want in all cases to compute both of them. In
7618 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7619 as actual zero-or-1 values and then bitwise anding. In cases where
7620 there cannot be any side effects, better code would be made by
7621 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7622 how to recognize those cases. */
7624 case TRUTH_AND_EXPR:
7626 this_optab = and_optab;
7631 this_optab = ior_optab;
7634 case TRUTH_XOR_EXPR:
7636 this_optab = xor_optab;
7643 preexpand_calls (exp);
7644 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7646 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7647 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7650 /* Could determine the answer when only additive constants differ. Also,
7651 the addition of one can be handled by changing the condition. */
7658 case UNORDERED_EXPR:
7665 preexpand_calls (exp);
7666 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7670 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7671 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7673 && GET_CODE (original_target) == REG
7674 && (GET_MODE (original_target)
7675 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7677 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7680 if (temp != original_target)
7681 temp = copy_to_reg (temp);
7683 op1 = gen_label_rtx ();
7684 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7685 GET_MODE (temp), unsignedp, 0, op1);
7686 emit_move_insn (temp, const1_rtx);
7691 /* If no set-flag instruction, must generate a conditional
7692 store into a temporary variable. Drop through
7693 and handle this like && and ||. */
7695 case TRUTH_ANDIF_EXPR:
7696 case TRUTH_ORIF_EXPR:
7698 && (target == 0 || ! safe_from_p (target, exp, 1)
7699 /* Make sure we don't have a hard reg (such as function's return
7700 value) live across basic blocks, if not optimizing. */
7701 || (!optimize && GET_CODE (target) == REG
7702 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7703 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7706 emit_clr_insn (target);
7708 op1 = gen_label_rtx ();
7709 jumpifnot (exp, op1);
7712 emit_0_to_1_insn (target);
7715 return ignore ? const0_rtx : target;
7717 case TRUTH_NOT_EXPR:
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7719 /* The parser is careful to generate TRUTH_NOT_EXPR
7720 only with operands that are always zero or one. */
7721 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7722 target, 1, OPTAB_LIB_WIDEN);
7728 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7730 return expand_expr (TREE_OPERAND (exp, 1),
7731 (ignore ? const0_rtx : target),
7735 /* If we would have a "singleton" (see below) were it not for a
7736 conversion in each arm, bring that conversion back out. */
7737 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7738 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7739 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7740 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7742 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7743 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7745 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7746 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7747 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7748 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7749 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7750 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7751 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7752 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7753 return expand_expr (build1 (NOP_EXPR, type,
7754 build (COND_EXPR, TREE_TYPE (true),
7755 TREE_OPERAND (exp, 0),
7757 target, tmode, modifier);
7761 /* Note that COND_EXPRs whose type is a structure or union
7762 are required to be constructed to contain assignments of
7763 a temporary variable, so that we can evaluate them here
7764 for side effect only. If type is void, we must do likewise. */
7766 /* If an arm of the branch requires a cleanup,
7767 only that cleanup is performed. */
7770 tree binary_op = 0, unary_op = 0;
7772 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7773 convert it to our mode, if necessary. */
7774 if (integer_onep (TREE_OPERAND (exp, 1))
7775 && integer_zerop (TREE_OPERAND (exp, 2))
7776 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7780 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7785 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7786 if (GET_MODE (op0) == mode)
7790 target = gen_reg_rtx (mode);
7791 convert_move (target, op0, unsignedp);
7795 /* Check for X ? A + B : A. If we have this, we can copy A to the
7796 output and conditionally add B. Similarly for unary operations.
7797 Don't do this if X has side-effects because those side effects
7798 might affect A or B and the "?" operation is a sequence point in
7799 ANSI. (operand_equal_p tests for side effects.) */
7801 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7802 && operand_equal_p (TREE_OPERAND (exp, 2),
7803 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7804 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7805 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7806 && operand_equal_p (TREE_OPERAND (exp, 1),
7807 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7808 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7809 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7810 && operand_equal_p (TREE_OPERAND (exp, 2),
7811 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7812 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7813 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7814 && operand_equal_p (TREE_OPERAND (exp, 1),
7815 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7816 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7818 /* If we are not to produce a result, we have no target. Otherwise,
7819 if a target was specified use it; it will not be used as an
7820 intermediate target unless it is safe. If no target, use a
7825 else if (original_target
7826 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7827 || (singleton && GET_CODE (original_target) == REG
7828 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7829 && original_target == var_rtx (singleton)))
7830 && GET_MODE (original_target) == mode
7831 #ifdef HAVE_conditional_move
7832 && (! can_conditionally_move_p (mode)
7833 || GET_CODE (original_target) == REG
7834 || TREE_ADDRESSABLE (type))
7836 && ! (GET_CODE (original_target) == MEM
7837 && MEM_VOLATILE_P (original_target)))
7838 temp = original_target;
7839 else if (TREE_ADDRESSABLE (type))
7842 temp = assign_temp (type, 0, 0, 1);
7844 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7845 do the test of X as a store-flag operation, do this as
7846 A + ((X != 0) << log C). Similarly for other simple binary
7847 operators. Only do for C == 1 if BRANCH_COST is low. */
7848 if (temp && singleton && binary_op
7849 && (TREE_CODE (binary_op) == PLUS_EXPR
7850 || TREE_CODE (binary_op) == MINUS_EXPR
7851 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7852 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7853 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7854 : integer_onep (TREE_OPERAND (binary_op, 1)))
7855 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7858 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7859 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7860 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7863 /* If we had X ? A : A + 1, do this as A + (X == 0).
7865 We have to invert the truth value here and then put it
7866 back later if do_store_flag fails. We cannot simply copy
7867 TREE_OPERAND (exp, 0) to another variable and modify that
7868 because invert_truthvalue can modify the tree pointed to
7870 if (singleton == TREE_OPERAND (exp, 1))
7871 TREE_OPERAND (exp, 0)
7872 = invert_truthvalue (TREE_OPERAND (exp, 0));
7874 result = do_store_flag (TREE_OPERAND (exp, 0),
7875 (safe_from_p (temp, singleton, 1)
7877 mode, BRANCH_COST <= 1);
7879 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7880 result = expand_shift (LSHIFT_EXPR, mode, result,
7881 build_int_2 (tree_log2
7885 (safe_from_p (temp, singleton, 1)
7886 ? temp : NULL_RTX), 0);
7890 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7891 return expand_binop (mode, boptab, op1, result, temp,
7892 unsignedp, OPTAB_LIB_WIDEN);
7894 else if (singleton == TREE_OPERAND (exp, 1))
7895 TREE_OPERAND (exp, 0)
7896 = invert_truthvalue (TREE_OPERAND (exp, 0));
7899 do_pending_stack_adjust ();
7901 op0 = gen_label_rtx ();
7903 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7907 /* If the target conflicts with the other operand of the
7908 binary op, we can't use it. Also, we can't use the target
7909 if it is a hard register, because evaluating the condition
7910 might clobber it. */
7912 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7913 || (GET_CODE (temp) == REG
7914 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7915 temp = gen_reg_rtx (mode);
7916 store_expr (singleton, temp, 0);
7919 expand_expr (singleton,
7920 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7921 if (singleton == TREE_OPERAND (exp, 1))
7922 jumpif (TREE_OPERAND (exp, 0), op0);
7924 jumpifnot (TREE_OPERAND (exp, 0), op0);
7926 start_cleanup_deferral ();
7927 if (binary_op && temp == 0)
7928 /* Just touch the other operand. */
7929 expand_expr (TREE_OPERAND (binary_op, 1),
7930 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7932 store_expr (build (TREE_CODE (binary_op), type,
7933 make_tree (type, temp),
7934 TREE_OPERAND (binary_op, 1)),
7937 store_expr (build1 (TREE_CODE (unary_op), type,
7938 make_tree (type, temp)),
7942 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7943 comparison operator. If we have one of these cases, set the
7944 output to A, branch on A (cse will merge these two references),
7945 then set the output to FOO. */
7947 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7948 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7949 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7950 TREE_OPERAND (exp, 1), 0)
7951 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7952 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7953 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7955 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7956 temp = gen_reg_rtx (mode);
7957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7958 jumpif (TREE_OPERAND (exp, 0), op0);
7960 start_cleanup_deferral ();
7961 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7965 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7966 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7967 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7968 TREE_OPERAND (exp, 2), 0)
7969 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7970 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7971 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7973 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7974 temp = gen_reg_rtx (mode);
7975 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7976 jumpifnot (TREE_OPERAND (exp, 0), op0);
7978 start_cleanup_deferral ();
7979 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7984 op1 = gen_label_rtx ();
7985 jumpifnot (TREE_OPERAND (exp, 0), op0);
7987 start_cleanup_deferral ();
7989 /* One branch of the cond can be void, if it never returns. For
7990 example A ? throw : E */
7992 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7995 expand_expr (TREE_OPERAND (exp, 1),
7996 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7997 end_cleanup_deferral ();
7999 emit_jump_insn (gen_jump (op1));
8002 start_cleanup_deferral ();
8004 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8005 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8007 expand_expr (TREE_OPERAND (exp, 2),
8008 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8011 end_cleanup_deferral ();
8022 /* Something needs to be initialized, but we didn't know
8023 where that thing was when building the tree. For example,
8024 it could be the return value of a function, or a parameter
8025 to a function which lays down in the stack, or a temporary
8026 variable which must be passed by reference.
8028 We guarantee that the expression will either be constructed
8029 or copied into our original target. */
8031 tree slot = TREE_OPERAND (exp, 0);
8032 tree cleanups = NULL_TREE;
8035 if (TREE_CODE (slot) != VAR_DECL)
8039 target = original_target;
8041 /* Set this here so that if we get a target that refers to a
8042 register variable that's already been used, put_reg_into_stack
8043 knows that it should fix up those uses. */
8044 TREE_USED (slot) = 1;
8048 if (DECL_RTL (slot) != 0)
8050 target = DECL_RTL (slot);
8051 /* If we have already expanded the slot, so don't do
8053 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8058 target = assign_temp (type, 2, 0, 1);
8059 /* All temp slots at this level must not conflict. */
8060 preserve_temp_slots (target);
8061 DECL_RTL (slot) = target;
8062 if (TREE_ADDRESSABLE (slot))
8064 TREE_ADDRESSABLE (slot) = 0;
8065 mark_addressable (slot);
8068 /* Since SLOT is not known to the called function
8069 to belong to its stack frame, we must build an explicit
8070 cleanup. This case occurs when we must build up a reference
8071 to pass the reference as an argument. In this case,
8072 it is very likely that such a reference need not be
8075 if (TREE_OPERAND (exp, 2) == 0)
8076 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8077 cleanups = TREE_OPERAND (exp, 2);
8082 /* This case does occur, when expanding a parameter which
8083 needs to be constructed on the stack. The target
8084 is the actual stack address that we want to initialize.
8085 The function we call will perform the cleanup in this case. */
8087 /* If we have already assigned it space, use that space,
8088 not target that we were passed in, as our target
8089 parameter is only a hint. */
8090 if (DECL_RTL (slot) != 0)
8092 target = DECL_RTL (slot);
8093 /* If we have already expanded the slot, so don't do
8095 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8100 DECL_RTL (slot) = target;
8101 /* If we must have an addressable slot, then make sure that
8102 the RTL that we just stored in slot is OK. */
8103 if (TREE_ADDRESSABLE (slot))
8105 TREE_ADDRESSABLE (slot) = 0;
8106 mark_addressable (slot);
8111 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8112 /* Mark it as expanded. */
8113 TREE_OPERAND (exp, 1) = NULL_TREE;
8115 store_expr (exp1, target, 0);
8117 expand_decl_cleanup (NULL_TREE, cleanups);
8124 tree lhs = TREE_OPERAND (exp, 0);
8125 tree rhs = TREE_OPERAND (exp, 1);
8126 tree noncopied_parts = 0;
8127 tree lhs_type = TREE_TYPE (lhs);
8129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8130 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8131 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8132 TYPE_NONCOPIED_PARTS (lhs_type));
8133 while (noncopied_parts != 0)
8135 expand_assignment (TREE_VALUE (noncopied_parts),
8136 TREE_PURPOSE (noncopied_parts), 0, 0);
8137 noncopied_parts = TREE_CHAIN (noncopied_parts);
8144 /* If lhs is complex, expand calls in rhs before computing it.
8145 That's so we don't compute a pointer and save it over a call.
8146 If lhs is simple, compute it first so we can give it as a
8147 target if the rhs is just a call. This avoids an extra temp and copy
8148 and that prevents a partial-subsumption which makes bad code.
8149 Actually we could treat component_ref's of vars like vars. */
8151 tree lhs = TREE_OPERAND (exp, 0);
8152 tree rhs = TREE_OPERAND (exp, 1);
8153 tree noncopied_parts = 0;
8154 tree lhs_type = TREE_TYPE (lhs);
8158 if (TREE_CODE (lhs) != VAR_DECL
8159 && TREE_CODE (lhs) != RESULT_DECL
8160 && TREE_CODE (lhs) != PARM_DECL
8161 && ! (TREE_CODE (lhs) == INDIRECT_REF
8162 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8163 preexpand_calls (exp);
8165 /* Check for |= or &= of a bitfield of size one into another bitfield
8166 of size 1. In this case, (unless we need the result of the
8167 assignment) we can do this more efficiently with a
8168 test followed by an assignment, if necessary.
8170 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8171 things change so we do, this code should be enhanced to
8174 && TREE_CODE (lhs) == COMPONENT_REF
8175 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8176 || TREE_CODE (rhs) == BIT_AND_EXPR)
8177 && TREE_OPERAND (rhs, 0) == lhs
8178 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8179 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8180 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8182 rtx label = gen_label_rtx ();
8184 do_jump (TREE_OPERAND (rhs, 1),
8185 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8186 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8187 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8188 (TREE_CODE (rhs) == BIT_IOR_EXPR
8190 : integer_zero_node)),
8192 do_pending_stack_adjust ();
8197 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8198 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8199 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8200 TYPE_NONCOPIED_PARTS (lhs_type));
8202 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8203 while (noncopied_parts != 0)
8205 expand_assignment (TREE_PURPOSE (noncopied_parts),
8206 TREE_VALUE (noncopied_parts), 0, 0);
8207 noncopied_parts = TREE_CHAIN (noncopied_parts);
8213 if (!TREE_OPERAND (exp, 0))
8214 expand_null_return ();
8216 expand_return (TREE_OPERAND (exp, 0));
8219 case PREINCREMENT_EXPR:
8220 case PREDECREMENT_EXPR:
8221 return expand_increment (exp, 0, ignore);
8223 case POSTINCREMENT_EXPR:
8224 case POSTDECREMENT_EXPR:
8225 /* Faster to treat as pre-increment if result is not used. */
8226 return expand_increment (exp, ! ignore, ignore);
8229 /* If nonzero, TEMP will be set to the address of something that might
8230 be a MEM corresponding to a stack slot. */
8233 /* Are we taking the address of a nested function? */
8234 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8235 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8236 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8237 && ! TREE_STATIC (exp))
8239 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8240 op0 = force_operand (op0, target);
8242 /* If we are taking the address of something erroneous, just
8244 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8248 /* We make sure to pass const0_rtx down if we came in with
8249 ignore set, to avoid doing the cleanups twice for something. */
8250 op0 = expand_expr (TREE_OPERAND (exp, 0),
8251 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8252 (modifier == EXPAND_INITIALIZER
8253 ? modifier : EXPAND_CONST_ADDRESS));
8255 /* If we are going to ignore the result, OP0 will have been set
8256 to const0_rtx, so just return it. Don't get confused and
8257 think we are taking the address of the constant. */
8261 op0 = protect_from_queue (op0, 0);
8263 /* We would like the object in memory. If it is a constant, we can
8264 have it be statically allocated into memory. For a non-constant,
8265 we need to allocate some memory and store the value into it. */
8267 if (CONSTANT_P (op0))
8268 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8270 else if (GET_CODE (op0) == MEM)
8272 mark_temp_addr_taken (op0);
8273 temp = XEXP (op0, 0);
8276 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8277 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8279 /* If this object is in a register, it must be not
8281 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8282 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8284 mark_temp_addr_taken (memloc);
8285 emit_move_insn (memloc, op0);
8289 if (GET_CODE (op0) != MEM)
8292 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8294 temp = XEXP (op0, 0);
8295 #ifdef POINTERS_EXTEND_UNSIGNED
8296 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8297 && mode == ptr_mode)
8298 temp = convert_memory_address (ptr_mode, temp);
8303 op0 = force_operand (XEXP (op0, 0), target);
8306 if (flag_force_addr && GET_CODE (op0) != REG)
8307 op0 = force_reg (Pmode, op0);
8309 if (GET_CODE (op0) == REG
8310 && ! REG_USERVAR_P (op0))
8311 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8313 /* If we might have had a temp slot, add an equivalent address
8316 update_temp_slot_address (temp, op0);
8318 #ifdef POINTERS_EXTEND_UNSIGNED
8319 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8320 && mode == ptr_mode)
8321 op0 = convert_memory_address (ptr_mode, op0);
8326 case ENTRY_VALUE_EXPR:
8329 /* COMPLEX type for Extended Pascal & Fortran */
8332 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8335 /* Get the rtx code of the operands. */
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8337 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8340 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8344 /* Move the real (op0) and imaginary (op1) parts to their location. */
8345 emit_move_insn (gen_realpart (mode, target), op0);
8346 emit_move_insn (gen_imagpart (mode, target), op1);
8348 insns = get_insns ();
8351 /* Complex construction should appear as a single unit. */
8352 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8353 each with a separate pseudo as destination.
8354 It's not correct for flow to treat them as a unit. */
8355 if (GET_CODE (target) != CONCAT)
8356 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8364 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8365 return gen_realpart (mode, op0);
8368 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8369 return gen_imagpart (mode, op0);
8373 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8380 target = gen_reg_rtx (mode);
8384 /* Store the realpart and the negated imagpart to target. */
8385 emit_move_insn (gen_realpart (partmode, target),
8386 gen_realpart (partmode, op0));
8388 imag_t = gen_imagpart (partmode, target);
8389 temp = expand_unop (partmode, neg_optab,
8390 gen_imagpart (partmode, op0), imag_t, 0);
8392 emit_move_insn (imag_t, temp);
8394 insns = get_insns ();
8397 /* Conjugate should appear as a single unit
8398 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8399 each with a separate pseudo as destination.
8400 It's not correct for flow to treat them as a unit. */
8401 if (GET_CODE (target) != CONCAT)
8402 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8409 case TRY_CATCH_EXPR:
8411 tree handler = TREE_OPERAND (exp, 1);
8413 expand_eh_region_start ();
8415 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8417 expand_eh_region_end (handler);
8422 case TRY_FINALLY_EXPR:
8424 tree try_block = TREE_OPERAND (exp, 0);
8425 tree finally_block = TREE_OPERAND (exp, 1);
8426 rtx finally_label = gen_label_rtx ();
8427 rtx done_label = gen_label_rtx ();
8428 rtx return_link = gen_reg_rtx (Pmode);
8429 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8430 (tree) finally_label, (tree) return_link);
8431 TREE_SIDE_EFFECTS (cleanup) = 1;
8433 /* Start a new binding layer that will keep track of all cleanup
8434 actions to be performed. */
8435 expand_start_bindings (2);
8437 target_temp_slot_level = temp_slot_level;
8439 expand_decl_cleanup (NULL_TREE, cleanup);
8440 op0 = expand_expr (try_block, target, tmode, modifier);
8442 preserve_temp_slots (op0);
8443 expand_end_bindings (NULL_TREE, 0, 0);
8444 emit_jump (done_label);
8445 emit_label (finally_label);
8446 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8447 emit_indirect_jump (return_link);
8448 emit_label (done_label);
8452 case GOTO_SUBROUTINE_EXPR:
8454 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8455 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8456 rtx return_address = gen_label_rtx ();
8457 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8459 emit_label (return_address);
8465 rtx dcc = get_dynamic_cleanup_chain ();
8466 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8472 rtx dhc = get_dynamic_handler_chain ();
8473 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8478 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8481 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8484 /* Here to do an ordinary binary operator, generating an instruction
8485 from the optab already placed in `this_optab'. */
8487 preexpand_calls (exp);
8488 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8491 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8493 temp = expand_binop (mode, this_optab, op0, op1, target,
8494 unsignedp, OPTAB_LIB_WIDEN);
8500 /* Similar to expand_expr, except that we don't specify a target, target
8501 mode, or modifier and we return the alignment of the inner type. This is
8502 used in cases where it is not necessary to align the result to the
8503 alignment of its type as long as we know the alignment of the result, for
8504 example for comparisons of BLKmode values. */
8507 expand_expr_unaligned (exp, palign)
8509 unsigned int *palign;
8512 tree type = TREE_TYPE (exp);
8513 register enum machine_mode mode = TYPE_MODE (type);
8515 /* Default the alignment we return to that of the type. */
8516 *palign = TYPE_ALIGN (type);
8518 /* The only cases in which we do anything special is if the resulting mode
8520 if (mode != BLKmode)
8521 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8523 switch (TREE_CODE (exp))
8527 case NON_LVALUE_EXPR:
8528 /* Conversions between BLKmode values don't change the underlying
8529 alignment or value. */
8530 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8531 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8535 /* Much of the code for this case is copied directly from expand_expr.
8536 We need to duplicate it here because we will do something different
8537 in the fall-through case, so we need to handle the same exceptions
8540 tree array = TREE_OPERAND (exp, 0);
8541 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8542 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8543 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8546 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8549 /* Optimize the special-case of a zero lower bound.
8551 We convert the low_bound to sizetype to avoid some problems
8552 with constant folding. (E.g. suppose the lower bound is 1,
8553 and its mode is QI. Without the conversion, (ARRAY
8554 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8555 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8557 if (! integer_zerop (low_bound))
8558 index = size_diffop (index, convert (sizetype, low_bound));
8560 /* If this is a constant index into a constant array,
8561 just get the value from the array. Handle both the cases when
8562 we have an explicit constructor and when our operand is a variable
8563 that was declared const. */
8565 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8566 && 0 > compare_tree_int (index,
8567 list_length (CONSTRUCTOR_ELTS
8568 (TREE_OPERAND (exp, 0)))))
8572 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8573 i = TREE_INT_CST_LOW (index);
8574 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8578 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8581 else if (optimize >= 1
8582 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8583 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8584 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8586 if (TREE_CODE (index) == INTEGER_CST)
8588 tree init = DECL_INITIAL (array);
8590 if (TREE_CODE (init) == CONSTRUCTOR)
8594 for (elem = CONSTRUCTOR_ELTS (init);
8595 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8596 elem = TREE_CHAIN (elem))
8600 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8607 /* ... fall through ... */
8611 /* If the operand is a CONSTRUCTOR, we can just extract the
8612 appropriate field if it is present. Don't do this if we have
8613 already written the data since we want to refer to that copy
8614 and varasm.c assumes that's what we'll do. */
8615 if (TREE_CODE (exp) != ARRAY_REF
8616 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8617 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8621 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8622 elt = TREE_CHAIN (elt))
8623 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8624 /* Note that unlike the case in expand_expr, we know this is
8625 BLKmode and hence not an integer. */
8626 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8630 enum machine_mode mode1;
8631 HOST_WIDE_INT bitsize, bitpos;
8634 unsigned int alignment;
8636 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8637 &mode1, &unsignedp, &volatilep,
8640 /* If we got back the original object, something is wrong. Perhaps
8641 we are evaluating an expression too early. In any event, don't
8642 infinitely recurse. */
8646 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8648 /* If this is a constant, put it into a register if it is a
8649 legitimate constant and OFFSET is 0 and memory if it isn't. */
8650 if (CONSTANT_P (op0))
8652 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8654 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8656 op0 = force_reg (inner_mode, op0);
8658 op0 = validize_mem (force_const_mem (inner_mode, op0));
8663 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8665 /* If this object is in a register, put it into memory.
8666 This case can't occur in C, but can in Ada if we have
8667 unchecked conversion of an expression from a scalar type to
8668 an array or record type. */
8669 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8670 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8672 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8674 mark_temp_addr_taken (memloc);
8675 emit_move_insn (memloc, op0);
8679 if (GET_CODE (op0) != MEM)
8682 if (GET_MODE (offset_rtx) != ptr_mode)
8684 #ifdef POINTERS_EXTEND_UNSIGNED
8685 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8687 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8691 op0 = change_address (op0, VOIDmode,
8692 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8693 force_reg (ptr_mode,
8697 /* Don't forget about volatility even if this is a bitfield. */
8698 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8700 op0 = copy_rtx (op0);
8701 MEM_VOLATILE_P (op0) = 1;
8704 /* Check the access. */
8705 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8710 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8711 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8713 /* Check the access right of the pointer. */
8714 if (size > BITS_PER_UNIT)
8715 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8716 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8717 TYPE_MODE (sizetype),
8718 GEN_INT (MEMORY_USE_RO),
8719 TYPE_MODE (integer_type_node));
8722 /* In cases where an aligned union has an unaligned object
8723 as a field, we might be extracting a BLKmode value from
8724 an integer-mode (e.g., SImode) object. Handle this case
8725 by doing the extract into an object as wide as the field
8726 (which we know to be the width of a basic mode), then
8727 storing into memory, and changing the mode to BLKmode.
8728 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8729 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8730 if (mode1 == VOIDmode
8731 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8732 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8733 && (TYPE_ALIGN (type) > alignment
8734 || bitpos % TYPE_ALIGN (type) != 0)))
8736 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8738 if (ext_mode == BLKmode)
8740 /* In this case, BITPOS must start at a byte boundary. */
8741 if (GET_CODE (op0) != MEM
8742 || bitpos % BITS_PER_UNIT != 0)
8745 op0 = change_address (op0, VOIDmode,
8746 plus_constant (XEXP (op0, 0),
8747 bitpos / BITS_PER_UNIT));
8751 rtx new = assign_stack_temp (ext_mode,
8752 bitsize / BITS_PER_UNIT, 0);
8754 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8755 unsignedp, NULL_RTX, ext_mode,
8756 ext_mode, alignment,
8757 int_size_in_bytes (TREE_TYPE (tem)));
8759 /* If the result is a record type and BITSIZE is narrower than
8760 the mode of OP0, an integral mode, and this is a big endian
8761 machine, we must put the field into the high-order bits. */
8762 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8763 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8764 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8765 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8766 size_int (GET_MODE_BITSIZE
8772 emit_move_insn (new, op0);
8773 op0 = copy_rtx (new);
8774 PUT_MODE (op0, BLKmode);
8778 /* Get a reference to just this component. */
8779 op0 = change_address (op0, mode1,
8780 plus_constant (XEXP (op0, 0),
8781 (bitpos / BITS_PER_UNIT)));
8783 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8785 /* Adjust the alignment in case the bit position is not
8786 a multiple of the alignment of the inner object. */
8787 while (bitpos % alignment != 0)
8790 if (GET_CODE (XEXP (op0, 0)) == REG)
8791 mark_reg_pointer (XEXP (op0, 0), alignment);
8793 MEM_IN_STRUCT_P (op0) = 1;
8794 MEM_VOLATILE_P (op0) |= volatilep;
8796 *palign = alignment;
8805 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8808 /* Return the tree node if a ARG corresponds to a string constant or zero
8809 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8810 in bytes within the string that ARG is accessing. The type of the
8811 offset will be `sizetype'. */
8814 string_constant (arg, ptr_offset)
8820 if (TREE_CODE (arg) == ADDR_EXPR
8821 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8823 *ptr_offset = size_zero_node;
8824 return TREE_OPERAND (arg, 0);
8826 else if (TREE_CODE (arg) == PLUS_EXPR)
8828 tree arg0 = TREE_OPERAND (arg, 0);
8829 tree arg1 = TREE_OPERAND (arg, 1);
8834 if (TREE_CODE (arg0) == ADDR_EXPR
8835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8837 *ptr_offset = convert (sizetype, arg1);
8838 return TREE_OPERAND (arg0, 0);
8840 else if (TREE_CODE (arg1) == ADDR_EXPR
8841 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8843 *ptr_offset = convert (sizetype, arg0);
8844 return TREE_OPERAND (arg1, 0);
8851 /* Expand code for a post- or pre- increment or decrement
8852 and return the RTX for the result.
8853 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8856 expand_increment (exp, post, ignore)
8860 register rtx op0, op1;
8861 register rtx temp, value;
8862 register tree incremented = TREE_OPERAND (exp, 0);
8863 optab this_optab = add_optab;
8865 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8866 int op0_is_copy = 0;
8867 int single_insn = 0;
8868 /* 1 means we can't store into OP0 directly,
8869 because it is a subreg narrower than a word,
8870 and we don't dare clobber the rest of the word. */
8873 /* Stabilize any component ref that might need to be
8874 evaluated more than once below. */
8876 || TREE_CODE (incremented) == BIT_FIELD_REF
8877 || (TREE_CODE (incremented) == COMPONENT_REF
8878 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8879 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8880 incremented = stabilize_reference (incremented);
8881 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8882 ones into save exprs so that they don't accidentally get evaluated
8883 more than once by the code below. */
8884 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8885 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8886 incremented = save_expr (incremented);
8888 /* Compute the operands as RTX.
8889 Note whether OP0 is the actual lvalue or a copy of it:
8890 I believe it is a copy iff it is a register or subreg
8891 and insns were generated in computing it. */
8893 temp = get_last_insn ();
8894 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8896 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8897 in place but instead must do sign- or zero-extension during assignment,
8898 so we copy it into a new register and let the code below use it as
8901 Note that we can safely modify this SUBREG since it is know not to be
8902 shared (it was made by the expand_expr call above). */
8904 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8907 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8911 else if (GET_CODE (op0) == SUBREG
8912 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8914 /* We cannot increment this SUBREG in place. If we are
8915 post-incrementing, get a copy of the old value. Otherwise,
8916 just mark that we cannot increment in place. */
8918 op0 = copy_to_reg (op0);
8923 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8924 && temp != get_last_insn ());
8925 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8926 EXPAND_MEMORY_USE_BAD);
8928 /* Decide whether incrementing or decrementing. */
8929 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8930 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8931 this_optab = sub_optab;
8933 /* Convert decrement by a constant into a negative increment. */
8934 if (this_optab == sub_optab
8935 && GET_CODE (op1) == CONST_INT)
8937 op1 = GEN_INT (- INTVAL (op1));
8938 this_optab = add_optab;
8941 /* For a preincrement, see if we can do this with a single instruction. */
8944 icode = (int) this_optab->handlers[(int) mode].insn_code;
8945 if (icode != (int) CODE_FOR_nothing
8946 /* Make sure that OP0 is valid for operands 0 and 1
8947 of the insn we want to queue. */
8948 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8949 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8950 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8954 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8955 then we cannot just increment OP0. We must therefore contrive to
8956 increment the original value. Then, for postincrement, we can return
8957 OP0 since it is a copy of the old value. For preincrement, expand here
8958 unless we can do it with a single insn.
8960 Likewise if storing directly into OP0 would clobber high bits
8961 we need to preserve (bad_subreg). */
8962 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8964 /* This is the easiest way to increment the value wherever it is.
8965 Problems with multiple evaluation of INCREMENTED are prevented
8966 because either (1) it is a component_ref or preincrement,
8967 in which case it was stabilized above, or (2) it is an array_ref
8968 with constant index in an array in a register, which is
8969 safe to reevaluate. */
8970 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8972 ? MINUS_EXPR : PLUS_EXPR),
8975 TREE_OPERAND (exp, 1));
8977 while (TREE_CODE (incremented) == NOP_EXPR
8978 || TREE_CODE (incremented) == CONVERT_EXPR)
8980 newexp = convert (TREE_TYPE (incremented), newexp);
8981 incremented = TREE_OPERAND (incremented, 0);
8984 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8985 return post ? op0 : temp;
8990 /* We have a true reference to the value in OP0.
8991 If there is an insn to add or subtract in this mode, queue it.
8992 Queueing the increment insn avoids the register shuffling
8993 that often results if we must increment now and first save
8994 the old value for subsequent use. */
8996 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8997 op0 = stabilize (op0);
9000 icode = (int) this_optab->handlers[(int) mode].insn_code;
9001 if (icode != (int) CODE_FOR_nothing
9002 /* Make sure that OP0 is valid for operands 0 and 1
9003 of the insn we want to queue. */
9004 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9005 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9007 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9008 op1 = force_reg (mode, op1);
9010 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9012 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9014 rtx addr = (general_operand (XEXP (op0, 0), mode)
9015 ? force_reg (Pmode, XEXP (op0, 0))
9016 : copy_to_reg (XEXP (op0, 0)));
9019 op0 = change_address (op0, VOIDmode, addr);
9020 temp = force_reg (GET_MODE (op0), op0);
9021 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9022 op1 = force_reg (mode, op1);
9024 /* The increment queue is LIFO, thus we have to `queue'
9025 the instructions in reverse order. */
9026 enqueue_insn (op0, gen_move_insn (op0, temp));
9027 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9032 /* Preincrement, or we can't increment with one simple insn. */
9034 /* Save a copy of the value before inc or dec, to return it later. */
9035 temp = value = copy_to_reg (op0);
9037 /* Arrange to return the incremented value. */
9038 /* Copy the rtx because expand_binop will protect from the queue,
9039 and the results of that would be invalid for us to return
9040 if our caller does emit_queue before using our result. */
9041 temp = copy_rtx (value = op0);
9043 /* Increment however we can. */
9044 op1 = expand_binop (mode, this_optab, value, op1,
9045 current_function_check_memory_usage ? NULL_RTX : op0,
9046 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9047 /* Make sure the value is stored into OP0. */
9049 emit_move_insn (op0, op1);
9054 /* Expand all function calls contained within EXP, innermost ones first.
9055 But don't look within expressions that have sequence points.
9056 For each CALL_EXPR, record the rtx for its value
9057 in the CALL_EXPR_RTL field. */
9060 preexpand_calls (exp)
9063 register int nops, i;
9064 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9066 if (! do_preexpand_calls)
9069 /* Only expressions and references can contain calls. */
9071 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9074 switch (TREE_CODE (exp))
9077 /* Do nothing if already expanded. */
9078 if (CALL_EXPR_RTL (exp) != 0
9079 /* Do nothing if the call returns a variable-sized object. */
9080 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9081 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9082 /* Do nothing to built-in functions. */
9083 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9084 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9086 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9089 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9094 case TRUTH_ANDIF_EXPR:
9095 case TRUTH_ORIF_EXPR:
9096 /* If we find one of these, then we can be sure
9097 the adjust will be done for it (since it makes jumps).
9098 Do it now, so that if this is inside an argument
9099 of a function, we don't get the stack adjustment
9100 after some other args have already been pushed. */
9101 do_pending_stack_adjust ();
9106 case WITH_CLEANUP_EXPR:
9107 case CLEANUP_POINT_EXPR:
9108 case TRY_CATCH_EXPR:
9112 if (SAVE_EXPR_RTL (exp) != 0)
9119 nops = tree_code_length[(int) TREE_CODE (exp)];
9120 for (i = 0; i < nops; i++)
9121 if (TREE_OPERAND (exp, i) != 0)
9123 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9124 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9125 It doesn't happen before the call is made. */
9129 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9130 if (type == 'e' || type == '<' || type == '1' || type == '2'
9132 preexpand_calls (TREE_OPERAND (exp, i));
9137 /* At the start of a function, record that we have no previously-pushed
9138 arguments waiting to be popped. */
9141 init_pending_stack_adjust ()
9143 pending_stack_adjust = 0;
9146 /* When exiting from function, if safe, clear out any pending stack adjust
9147 so the adjustment won't get done.
9149 Note, if the current function calls alloca, then it must have a
9150 frame pointer regardless of the value of flag_omit_frame_pointer. */
9153 clear_pending_stack_adjust ()
9155 #ifdef EXIT_IGNORE_STACK
9157 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9158 && EXIT_IGNORE_STACK
9159 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9160 && ! flag_inline_functions)
9162 stack_pointer_delta -= pending_stack_adjust,
9163 pending_stack_adjust = 0;
9168 /* Pop any previously-pushed arguments that have not been popped yet. */
9171 do_pending_stack_adjust ()
9173 if (inhibit_defer_pop == 0)
9175 if (pending_stack_adjust != 0)
9176 adjust_stack (GEN_INT (pending_stack_adjust));
9177 pending_stack_adjust = 0;
9181 /* Expand conditional expressions. */
9183 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9184 LABEL is an rtx of code CODE_LABEL, in this function and all the
9188 jumpifnot (exp, label)
9192 do_jump (exp, label, NULL_RTX);
9195 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9202 do_jump (exp, NULL_RTX, label);
9205 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9206 the result is zero, or IF_TRUE_LABEL if the result is one.
9207 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9208 meaning fall through in that case.
9210 do_jump always does any pending stack adjust except when it does not
9211 actually perform a jump. An example where there is no jump
9212 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9214 This function is responsible for optimizing cases such as
9215 &&, || and comparison operators in EXP. */
9218 do_jump (exp, if_false_label, if_true_label)
9220 rtx if_false_label, if_true_label;
9222 register enum tree_code code = TREE_CODE (exp);
9223 /* Some cases need to create a label to jump to
9224 in order to properly fall through.
9225 These cases set DROP_THROUGH_LABEL nonzero. */
9226 rtx drop_through_label = 0;
9230 enum machine_mode mode;
9232 #ifdef MAX_INTEGER_COMPUTATION_MODE
9233 check_max_integer_computation_mode (exp);
9244 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9250 /* This is not true with #pragma weak */
9252 /* The address of something can never be zero. */
9254 emit_jump (if_true_label);
9259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9260 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9261 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9264 /* If we are narrowing the operand, we have to do the compare in the
9266 if ((TYPE_PRECISION (TREE_TYPE (exp))
9267 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9269 case NON_LVALUE_EXPR:
9270 case REFERENCE_EXPR:
9275 /* These cannot change zero->non-zero or vice versa. */
9276 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9279 case WITH_RECORD_EXPR:
9280 /* Put the object on the placeholder list, recurse through our first
9281 operand, and pop the list. */
9282 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9284 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9285 placeholder_list = TREE_CHAIN (placeholder_list);
9289 /* This is never less insns than evaluating the PLUS_EXPR followed by
9290 a test and can be longer if the test is eliminated. */
9292 /* Reduce to minus. */
9293 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9294 TREE_OPERAND (exp, 0),
9295 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9296 TREE_OPERAND (exp, 1))));
9297 /* Process as MINUS. */
9301 /* Non-zero iff operands of minus differ. */
9302 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9303 TREE_OPERAND (exp, 0),
9304 TREE_OPERAND (exp, 1)),
9305 NE, NE, if_false_label, if_true_label);
9309 /* If we are AND'ing with a small constant, do this comparison in the
9310 smallest type that fits. If the machine doesn't have comparisons
9311 that small, it will be converted back to the wider comparison.
9312 This helps if we are testing the sign bit of a narrower object.
9313 combine can't do this for us because it can't know whether a
9314 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9316 if (! SLOW_BYTE_ACCESS
9317 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9318 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9319 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9320 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9321 && (type = type_for_mode (mode, 1)) != 0
9322 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9323 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9324 != CODE_FOR_nothing))
9326 do_jump (convert (type, exp), if_false_label, if_true_label);
9331 case TRUTH_NOT_EXPR:
9332 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9335 case TRUTH_ANDIF_EXPR:
9336 if (if_false_label == 0)
9337 if_false_label = drop_through_label = gen_label_rtx ();
9338 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9339 start_cleanup_deferral ();
9340 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9341 end_cleanup_deferral ();
9344 case TRUTH_ORIF_EXPR:
9345 if (if_true_label == 0)
9346 if_true_label = drop_through_label = gen_label_rtx ();
9347 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9348 start_cleanup_deferral ();
9349 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9350 end_cleanup_deferral ();
9355 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9356 preserve_temp_slots (NULL_RTX);
9360 do_pending_stack_adjust ();
9361 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9368 HOST_WIDE_INT bitsize, bitpos;
9370 enum machine_mode mode;
9374 unsigned int alignment;
9376 /* Get description of this reference. We don't actually care
9377 about the underlying object here. */
9378 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9379 &unsignedp, &volatilep, &alignment);
9381 type = type_for_size (bitsize, unsignedp);
9382 if (! SLOW_BYTE_ACCESS
9383 && type != 0 && bitsize >= 0
9384 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9385 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9386 != CODE_FOR_nothing))
9388 do_jump (convert (type, exp), if_false_label, if_true_label);
9395 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9396 if (integer_onep (TREE_OPERAND (exp, 1))
9397 && integer_zerop (TREE_OPERAND (exp, 2)))
9398 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9400 else if (integer_zerop (TREE_OPERAND (exp, 1))
9401 && integer_onep (TREE_OPERAND (exp, 2)))
9402 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9406 register rtx label1 = gen_label_rtx ();
9407 drop_through_label = gen_label_rtx ();
9409 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9411 start_cleanup_deferral ();
9412 /* Now the THEN-expression. */
9413 do_jump (TREE_OPERAND (exp, 1),
9414 if_false_label ? if_false_label : drop_through_label,
9415 if_true_label ? if_true_label : drop_through_label);
9416 /* In case the do_jump just above never jumps. */
9417 do_pending_stack_adjust ();
9418 emit_label (label1);
9420 /* Now the ELSE-expression. */
9421 do_jump (TREE_OPERAND (exp, 2),
9422 if_false_label ? if_false_label : drop_through_label,
9423 if_true_label ? if_true_label : drop_through_label);
9424 end_cleanup_deferral ();
9430 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9432 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9433 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9435 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9436 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9439 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9440 fold (build (EQ_EXPR, TREE_TYPE (exp),
9441 fold (build1 (REALPART_EXPR,
9442 TREE_TYPE (inner_type),
9444 fold (build1 (REALPART_EXPR,
9445 TREE_TYPE (inner_type),
9447 fold (build (EQ_EXPR, TREE_TYPE (exp),
9448 fold (build1 (IMAGPART_EXPR,
9449 TREE_TYPE (inner_type),
9451 fold (build1 (IMAGPART_EXPR,
9452 TREE_TYPE (inner_type),
9454 if_false_label, if_true_label);
9457 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9458 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9460 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9461 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9462 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9464 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9470 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9472 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9473 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9475 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9476 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9479 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9480 fold (build (NE_EXPR, TREE_TYPE (exp),
9481 fold (build1 (REALPART_EXPR,
9482 TREE_TYPE (inner_type),
9484 fold (build1 (REALPART_EXPR,
9485 TREE_TYPE (inner_type),
9487 fold (build (NE_EXPR, TREE_TYPE (exp),
9488 fold (build1 (IMAGPART_EXPR,
9489 TREE_TYPE (inner_type),
9491 fold (build1 (IMAGPART_EXPR,
9492 TREE_TYPE (inner_type),
9494 if_false_label, if_true_label);
9497 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9498 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9500 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9501 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9502 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9504 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9509 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9510 if (GET_MODE_CLASS (mode) == MODE_INT
9511 && ! can_compare_p (LT, mode, ccp_jump))
9512 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9514 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9518 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9519 if (GET_MODE_CLASS (mode) == MODE_INT
9520 && ! can_compare_p (LE, mode, ccp_jump))
9521 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9523 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9527 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9528 if (GET_MODE_CLASS (mode) == MODE_INT
9529 && ! can_compare_p (GT, mode, ccp_jump))
9530 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9532 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9536 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9537 if (GET_MODE_CLASS (mode) == MODE_INT
9538 && ! can_compare_p (GE, mode, ccp_jump))
9539 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9541 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9544 case UNORDERED_EXPR:
9547 enum rtx_code cmp, rcmp;
9550 if (code == UNORDERED_EXPR)
9551 cmp = UNORDERED, rcmp = ORDERED;
9553 cmp = ORDERED, rcmp = UNORDERED;
9554 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9557 if (! can_compare_p (cmp, mode, ccp_jump)
9558 && (can_compare_p (rcmp, mode, ccp_jump)
9559 /* If the target doesn't provide either UNORDERED or ORDERED
9560 comparisons, canonicalize on UNORDERED for the library. */
9561 || rcmp == UNORDERED))
9565 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9567 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9572 enum rtx_code rcode1;
9573 enum tree_code tcode2;
9597 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9598 if (can_compare_p (rcode1, mode, ccp_jump))
9599 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9603 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9604 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9607 /* If the target doesn't support combined unordered
9608 compares, decompose into UNORDERED + comparison. */
9609 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9610 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9611 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9612 do_jump (exp, if_false_label, if_true_label);
9619 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9621 /* This is not needed any more and causes poor code since it causes
9622 comparisons and tests from non-SI objects to have different code
9624 /* Copy to register to avoid generating bad insns by cse
9625 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9626 if (!cse_not_expected && GET_CODE (temp) == MEM)
9627 temp = copy_to_reg (temp);
9629 do_pending_stack_adjust ();
9630 /* Do any postincrements in the expression that was tested. */
9633 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9635 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9639 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9640 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9641 /* Note swapping the labels gives us not-equal. */
9642 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9643 else if (GET_MODE (temp) != VOIDmode)
9644 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9645 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9646 GET_MODE (temp), NULL_RTX, 0,
9647 if_false_label, if_true_label);
9652 if (drop_through_label)
9654 /* If do_jump produces code that might be jumped around,
9655 do any stack adjusts from that code, before the place
9656 where control merges in. */
9657 do_pending_stack_adjust ();
9658 emit_label (drop_through_label);
9662 /* Given a comparison expression EXP for values too wide to be compared
9663 with one insn, test the comparison and jump to the appropriate label.
9664 The code of EXP is ignored; we always test GT if SWAP is 0,
9665 and LT if SWAP is 1. */
9668 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9671 rtx if_false_label, if_true_label;
9673 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9674 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9676 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9678 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9681 /* Compare OP0 with OP1, word at a time, in mode MODE.
9682 UNSIGNEDP says to do unsigned comparison.
9683 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9686 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9687 enum machine_mode mode;
9690 rtx if_false_label, if_true_label;
9692 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9693 rtx drop_through_label = 0;
9696 if (! if_true_label || ! if_false_label)
9697 drop_through_label = gen_label_rtx ();
9698 if (! if_true_label)
9699 if_true_label = drop_through_label;
9700 if (! if_false_label)
9701 if_false_label = drop_through_label;
9703 /* Compare a word at a time, high order first. */
9704 for (i = 0; i < nwords; i++)
9706 rtx op0_word, op1_word;
9708 if (WORDS_BIG_ENDIAN)
9710 op0_word = operand_subword_force (op0, i, mode);
9711 op1_word = operand_subword_force (op1, i, mode);
9715 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9716 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9719 /* All but high-order word must be compared as unsigned. */
9720 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9721 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9722 NULL_RTX, if_true_label);
9724 /* Consider lower words only if these are equal. */
9725 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9726 NULL_RTX, 0, NULL_RTX, if_false_label);
9730 emit_jump (if_false_label);
9731 if (drop_through_label)
9732 emit_label (drop_through_label);
9735 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9736 with one insn, test the comparison and jump to the appropriate label. */
9739 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9741 rtx if_false_label, if_true_label;
9743 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9744 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9745 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9746 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9748 rtx drop_through_label = 0;
9750 if (! if_false_label)
9751 drop_through_label = if_false_label = gen_label_rtx ();
9753 for (i = 0; i < nwords; i++)
9754 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9755 operand_subword_force (op1, i, mode),
9756 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9757 word_mode, NULL_RTX, 0, if_false_label,
9761 emit_jump (if_true_label);
9762 if (drop_through_label)
9763 emit_label (drop_through_label);
9766 /* Jump according to whether OP0 is 0.
9767 We assume that OP0 has an integer mode that is too wide
9768 for the available compare insns. */
9771 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9773 rtx if_false_label, if_true_label;
9775 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9778 rtx drop_through_label = 0;
9780 /* The fastest way of doing this comparison on almost any machine is to
9781 "or" all the words and compare the result. If all have to be loaded
9782 from memory and this is a very wide item, it's possible this may
9783 be slower, but that's highly unlikely. */
9785 part = gen_reg_rtx (word_mode);
9786 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9787 for (i = 1; i < nwords && part != 0; i++)
9788 part = expand_binop (word_mode, ior_optab, part,
9789 operand_subword_force (op0, i, GET_MODE (op0)),
9790 part, 1, OPTAB_WIDEN);
9794 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9795 NULL_RTX, 0, if_false_label, if_true_label);
9800 /* If we couldn't do the "or" simply, do this with a series of compares. */
9801 if (! if_false_label)
9802 drop_through_label = if_false_label = gen_label_rtx ();
9804 for (i = 0; i < nwords; i++)
9805 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9806 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9807 if_false_label, NULL_RTX);
9810 emit_jump (if_true_label);
9812 if (drop_through_label)
9813 emit_label (drop_through_label);
9816 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9817 (including code to compute the values to be compared)
9818 and set (CC0) according to the result.
9819 The decision as to signed or unsigned comparison must be made by the caller.
9821 We force a stack adjustment unless there are currently
9822 things pushed on the stack that aren't yet used.
9824 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9827 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9828 size of MODE should be used. */
9831 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9832 register rtx op0, op1;
9835 enum machine_mode mode;
9841 /* If one operand is constant, make it the second one. Only do this
9842 if the other operand is not constant as well. */
9844 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9845 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9850 code = swap_condition (code);
9855 op0 = force_not_mem (op0);
9856 op1 = force_not_mem (op1);
9859 do_pending_stack_adjust ();
9861 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9862 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9866 /* There's no need to do this now that combine.c can eliminate lots of
9867 sign extensions. This can be less efficient in certain cases on other
9870 /* If this is a signed equality comparison, we can do it as an
9871 unsigned comparison since zero-extension is cheaper than sign
9872 extension and comparisons with zero are done as unsigned. This is
9873 the case even on machines that can do fast sign extension, since
9874 zero-extension is easier to combine with other operations than
9875 sign-extension is. If we are comparing against a constant, we must
9876 convert it to what it would look like unsigned. */
9877 if ((code == EQ || code == NE) && ! unsignedp
9878 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9880 if (GET_CODE (op1) == CONST_INT
9881 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9882 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9887 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9889 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9892 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9893 The decision as to signed or unsigned comparison must be made by the caller.
9895 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9898 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9899 size of MODE should be used. */
9902 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9903 if_false_label, if_true_label)
9904 register rtx op0, op1;
9907 enum machine_mode mode;
9910 rtx if_false_label, if_true_label;
9913 int dummy_true_label = 0;
9915 /* Reverse the comparison if that is safe and we want to jump if it is
9917 if (! if_true_label && ! FLOAT_MODE_P (mode))
9919 if_true_label = if_false_label;
9921 code = reverse_condition (code);
9924 /* If one operand is constant, make it the second one. Only do this
9925 if the other operand is not constant as well. */
9927 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9928 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9933 code = swap_condition (code);
9938 op0 = force_not_mem (op0);
9939 op1 = force_not_mem (op1);
9942 do_pending_stack_adjust ();
9944 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9945 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9947 if (tem == const_true_rtx)
9950 emit_jump (if_true_label);
9955 emit_jump (if_false_label);
9961 /* There's no need to do this now that combine.c can eliminate lots of
9962 sign extensions. This can be less efficient in certain cases on other
9965 /* If this is a signed equality comparison, we can do it as an
9966 unsigned comparison since zero-extension is cheaper than sign
9967 extension and comparisons with zero are done as unsigned. This is
9968 the case even on machines that can do fast sign extension, since
9969 zero-extension is easier to combine with other operations than
9970 sign-extension is. If we are comparing against a constant, we must
9971 convert it to what it would look like unsigned. */
9972 if ((code == EQ || code == NE) && ! unsignedp
9973 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9975 if (GET_CODE (op1) == CONST_INT
9976 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9977 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9982 if (! if_true_label)
9984 dummy_true_label = 1;
9985 if_true_label = gen_label_rtx ();
9988 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9992 emit_jump (if_false_label);
9993 if (dummy_true_label)
9994 emit_label (if_true_label);
9997 /* Generate code for a comparison expression EXP (including code to compute
9998 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9999 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10000 generated code will drop through.
10001 SIGNED_CODE should be the rtx operation for this comparison for
10002 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10004 We force a stack adjustment unless there are currently
10005 things pushed on the stack that aren't yet used. */
10008 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10011 enum rtx_code signed_code, unsigned_code;
10012 rtx if_false_label, if_true_label;
10014 unsigned int align0, align1;
10015 register rtx op0, op1;
10016 register tree type;
10017 register enum machine_mode mode;
10019 enum rtx_code code;
10021 /* Don't crash if the comparison was erroneous. */
10022 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10026 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10027 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10028 mode = TYPE_MODE (type);
10029 unsignedp = TREE_UNSIGNED (type);
10030 code = unsignedp ? unsigned_code : signed_code;
10032 #ifdef HAVE_canonicalize_funcptr_for_compare
10033 /* If function pointers need to be "canonicalized" before they can
10034 be reliably compared, then canonicalize them. */
10035 if (HAVE_canonicalize_funcptr_for_compare
10036 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10037 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10040 rtx new_op0 = gen_reg_rtx (mode);
10042 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10046 if (HAVE_canonicalize_funcptr_for_compare
10047 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10048 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10051 rtx new_op1 = gen_reg_rtx (mode);
10053 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10058 /* Do any postincrements in the expression that was tested. */
10061 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10063 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10064 MIN (align0, align1),
10065 if_false_label, if_true_label);
10068 /* Generate code to calculate EXP using a store-flag instruction
10069 and return an rtx for the result. EXP is either a comparison
10070 or a TRUTH_NOT_EXPR whose operand is a comparison.
10072 If TARGET is nonzero, store the result there if convenient.
10074 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10077 Return zero if there is no suitable set-flag instruction
10078 available on this machine.
10080 Once expand_expr has been called on the arguments of the comparison,
10081 we are committed to doing the store flag, since it is not safe to
10082 re-evaluate the expression. We emit the store-flag insn by calling
10083 emit_store_flag, but only expand the arguments if we have a reason
10084 to believe that emit_store_flag will be successful. If we think that
10085 it will, but it isn't, we have to simulate the store-flag with a
10086 set/jump/set sequence. */
10089 do_store_flag (exp, target, mode, only_cheap)
10092 enum machine_mode mode;
10095 enum rtx_code code;
10096 tree arg0, arg1, type;
10098 enum machine_mode operand_mode;
10102 enum insn_code icode;
10103 rtx subtarget = target;
10106 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10107 result at the end. We can't simply invert the test since it would
10108 have already been inverted if it were valid. This case occurs for
10109 some floating-point comparisons. */
10111 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10112 invert = 1, exp = TREE_OPERAND (exp, 0);
10114 arg0 = TREE_OPERAND (exp, 0);
10115 arg1 = TREE_OPERAND (exp, 1);
10116 type = TREE_TYPE (arg0);
10117 operand_mode = TYPE_MODE (type);
10118 unsignedp = TREE_UNSIGNED (type);
10120 /* We won't bother with BLKmode store-flag operations because it would mean
10121 passing a lot of information to emit_store_flag. */
10122 if (operand_mode == BLKmode)
10125 /* We won't bother with store-flag operations involving function pointers
10126 when function pointers must be canonicalized before comparisons. */
10127 #ifdef HAVE_canonicalize_funcptr_for_compare
10128 if (HAVE_canonicalize_funcptr_for_compare
10129 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10130 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10132 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10133 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10134 == FUNCTION_TYPE))))
10141 /* Get the rtx comparison code to use. We know that EXP is a comparison
10142 operation of some type. Some comparisons against 1 and -1 can be
10143 converted to comparisons with zero. Do so here so that the tests
10144 below will be aware that we have a comparison with zero. These
10145 tests will not catch constants in the first operand, but constants
10146 are rarely passed as the first operand. */
10148 switch (TREE_CODE (exp))
10157 if (integer_onep (arg1))
10158 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10160 code = unsignedp ? LTU : LT;
10163 if (! unsignedp && integer_all_onesp (arg1))
10164 arg1 = integer_zero_node, code = LT;
10166 code = unsignedp ? LEU : LE;
10169 if (! unsignedp && integer_all_onesp (arg1))
10170 arg1 = integer_zero_node, code = GE;
10172 code = unsignedp ? GTU : GT;
10175 if (integer_onep (arg1))
10176 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10178 code = unsignedp ? GEU : GE;
10181 case UNORDERED_EXPR:
10207 /* Put a constant second. */
10208 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10210 tem = arg0; arg0 = arg1; arg1 = tem;
10211 code = swap_condition (code);
10214 /* If this is an equality or inequality test of a single bit, we can
10215 do this by shifting the bit being tested to the low-order bit and
10216 masking the result with the constant 1. If the condition was EQ,
10217 we xor it with 1. This does not require an scc insn and is faster
10218 than an scc insn even if we have it. */
10220 if ((code == NE || code == EQ)
10221 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10222 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10224 tree inner = TREE_OPERAND (arg0, 0);
10225 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10228 /* If INNER is a right shift of a constant and it plus BITNUM does
10229 not overflow, adjust BITNUM and INNER. */
10231 if (TREE_CODE (inner) == RSHIFT_EXPR
10232 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10233 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10234 && bitnum < TYPE_PRECISION (type)
10235 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10236 bitnum - TYPE_PRECISION (type)))
10238 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10239 inner = TREE_OPERAND (inner, 0);
10242 /* If we are going to be able to omit the AND below, we must do our
10243 operations as unsigned. If we must use the AND, we have a choice.
10244 Normally unsigned is faster, but for some machines signed is. */
10245 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10246 #ifdef LOAD_EXTEND_OP
10247 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10253 if (subtarget == 0 || GET_CODE (subtarget) != REG
10254 || GET_MODE (subtarget) != operand_mode
10255 || ! safe_from_p (subtarget, inner, 1))
10258 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10261 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10262 size_int (bitnum), subtarget, ops_unsignedp);
10264 if (GET_MODE (op0) != mode)
10265 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10267 if ((code == EQ && ! invert) || (code == NE && invert))
10268 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10269 ops_unsignedp, OPTAB_LIB_WIDEN);
10271 /* Put the AND last so it can combine with more things. */
10272 if (bitnum != TYPE_PRECISION (type) - 1)
10273 op0 = expand_and (op0, const1_rtx, subtarget);
10278 /* Now see if we are likely to be able to do this. Return if not. */
10279 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10282 icode = setcc_gen_code[(int) code];
10283 if (icode == CODE_FOR_nothing
10284 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10286 /* We can only do this if it is one of the special cases that
10287 can be handled without an scc insn. */
10288 if ((code == LT && integer_zerop (arg1))
10289 || (! only_cheap && code == GE && integer_zerop (arg1)))
10291 else if (BRANCH_COST >= 0
10292 && ! only_cheap && (code == NE || code == EQ)
10293 && TREE_CODE (type) != REAL_TYPE
10294 && ((abs_optab->handlers[(int) operand_mode].insn_code
10295 != CODE_FOR_nothing)
10296 || (ffs_optab->handlers[(int) operand_mode].insn_code
10297 != CODE_FOR_nothing)))
10303 preexpand_calls (exp);
10304 if (subtarget == 0 || GET_CODE (subtarget) != REG
10305 || GET_MODE (subtarget) != operand_mode
10306 || ! safe_from_p (subtarget, arg1, 1))
10309 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10310 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10313 target = gen_reg_rtx (mode);
10315 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10316 because, if the emit_store_flag does anything it will succeed and
10317 OP0 and OP1 will not be used subsequently. */
10319 result = emit_store_flag (target, code,
10320 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10321 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10322 operand_mode, unsignedp, 1);
10327 result = expand_binop (mode, xor_optab, result, const1_rtx,
10328 result, 0, OPTAB_LIB_WIDEN);
10332 /* If this failed, we have to do this with set/compare/jump/set code. */
10333 if (GET_CODE (target) != REG
10334 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10335 target = gen_reg_rtx (GET_MODE (target));
10337 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10338 result = compare_from_rtx (op0, op1, code, unsignedp,
10339 operand_mode, NULL_RTX, 0);
10340 if (GET_CODE (result) == CONST_INT)
10341 return (((result == const0_rtx && ! invert)
10342 || (result != const0_rtx && invert))
10343 ? const0_rtx : const1_rtx);
10345 label = gen_label_rtx ();
10346 if (bcc_gen_fctn[(int) code] == 0)
10349 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10350 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10351 emit_label (label);
10356 /* Generate a tablejump instruction (used for switch statements). */
10358 #ifdef HAVE_tablejump
10360 /* INDEX is the value being switched on, with the lowest value
10361 in the table already subtracted.
10362 MODE is its expected mode (needed if INDEX is constant).
10363 RANGE is the length of the jump table.
10364 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10366 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10367 index value is out of range. */
10370 do_tablejump (index, mode, range, table_label, default_label)
10371 rtx index, range, table_label, default_label;
10372 enum machine_mode mode;
10374 register rtx temp, vector;
10376 /* Do an unsigned comparison (in the proper mode) between the index
10377 expression and the value which represents the length of the range.
10378 Since we just finished subtracting the lower bound of the range
10379 from the index expression, this comparison allows us to simultaneously
10380 check that the original index expression value is both greater than
10381 or equal to the minimum value of the range and less than or equal to
10382 the maximum value of the range. */
10384 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10387 /* If index is in range, it must fit in Pmode.
10388 Convert to Pmode so we can index with it. */
10390 index = convert_to_mode (Pmode, index, 1);
10392 /* Don't let a MEM slip thru, because then INDEX that comes
10393 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10394 and break_out_memory_refs will go to work on it and mess it up. */
10395 #ifdef PIC_CASE_VECTOR_ADDRESS
10396 if (flag_pic && GET_CODE (index) != REG)
10397 index = copy_to_mode_reg (Pmode, index);
10400 /* If flag_force_addr were to affect this address
10401 it could interfere with the tricky assumptions made
10402 about addresses that contain label-refs,
10403 which may be valid only very near the tablejump itself. */
10404 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10405 GET_MODE_SIZE, because this indicates how large insns are. The other
10406 uses should all be Pmode, because they are addresses. This code
10407 could fail if addresses and insns are not the same size. */
10408 index = gen_rtx_PLUS (Pmode,
10409 gen_rtx_MULT (Pmode, index,
10410 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10411 gen_rtx_LABEL_REF (Pmode, table_label));
10412 #ifdef PIC_CASE_VECTOR_ADDRESS
10414 index = PIC_CASE_VECTOR_ADDRESS (index);
10417 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10418 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10419 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10420 RTX_UNCHANGING_P (vector) = 1;
10421 convert_move (temp, vector, 0);
10423 emit_jump_insn (gen_tablejump (temp, table_label));
10425 /* If we are generating PIC code or if the table is PC-relative, the
10426 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10427 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10431 #endif /* HAVE_tablejump */