1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
52 /* Supply a default definition for PUSH_ARGS. */
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
79 #define STACK_PUSH_CODE PRE_INC
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Don't check memory usage, since code is being emitted to check memory
106 usage. Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
115 struct move_by_pieces
/* NOTE(review): several members of this struct are elided in this excerpt;
   only a subset of the fields is visible below.  */
124 int explicit_inc_from;	/* Nonzero when the FROM address is advanced by
				   explicit increments -- presumably mirrors an
				   elided explicit_inc_to field; confirm against
				   the full source.  */
125 unsigned HOST_WIDE_INT len;	/* Number of bytes remaining to move.  */
126 HOST_WIDE_INT offset;	/* Current byte offset within the block.  */
130 /* This structure is used by store_by_pieces to describe the clear to
133 struct store_by_pieces
/* NOTE(review): some members are elided in this excerpt.  */
139 unsigned HOST_WIDE_INT len;	/* Number of bytes remaining to store.  */
140 HOST_WIDE_INT offset;	/* Current byte offset within the block.  */
141 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
	/* Callback producing the constant rtx to store at a given offset in
	   a given mode.  */
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
152 PARAMS ((unsigned HOST_WIDE_INT,
154 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
158 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
162 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
164 struct store_by_pieces *));
165 static rtx get_subtarget PARAMS ((rtx));
166 static int is_zeros_p PARAMS ((tree));
167 static int mostly_zeros_p PARAMS ((tree));
168 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, tree, unsigned int, int,
172 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
174 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
175 HOST_WIDE_INT, enum machine_mode,
176 tree, enum machine_mode, int,
177 unsigned int, HOST_WIDE_INT, int));
178 static enum memory_use_mode
179 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
180 static tree save_noncopied_parts PARAMS ((tree, tree));
181 static tree init_noncopied_parts PARAMS ((tree, tree));
182 static int fixed_type_p PARAMS ((tree));
183 static rtx var_rtx PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
187 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
188 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
190 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
192 /* Record for each mode whether we can move a register directly to or
193 from an object of that mode in memory. If we can't, we won't try
194 to use that mode directly when accessing a field of that mode. */
196 static char direct_load[NUM_MACHINE_MODES];
197 static char direct_store[NUM_MACHINE_MODES];
199 /* If a memory-to-memory move would take MOVE_RATIO or more simple
200 move-instruction sequences, we will do a movstr or libcall instead. */
203 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
206 /* If we are optimizing for space (-Os), cut down the default move ratio. */
207 #define MOVE_RATIO (optimize_size ? 3 : 15)
211 /* This macro is used to determine whether move_by_pieces should be called
212 to perform a structure copy. */
213 #ifndef MOVE_BY_PIECES_P
214 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
215 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
218 /* This array records the insn_code of insns to perform block moves. */
219 enum insn_code movstr_optab[NUM_MACHINE_MODES];
221 /* This array records the insn_code of insns to perform block clears. */
222 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
224 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
226 #ifndef SLOW_UNALIGNED_ACCESS
227 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
230 /* This is run once per compilation to set up which modes can be used
231 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function signature, local declarations and several
   statements are elided in this excerpt.  The visible logic probes, for
   every machine mode, whether some hard register can be directly loaded
   from / stored to memory, recording the result in direct_load[] and
   direct_store[].  */
237 enum machine_mode mode;
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
/* A scratch SET insn whose operands are patched below and handed to
   recog to test whether the target accepts each load/store form.  */
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
270 if (! HARD_REGNO_MODE_OK (regno, mode))
273 reg = gen_rtx_REG (mode, regno);
/* reg <- mem load test (the SET_SRC assignment is elided in this view).  */
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
/* reg <- mem1 load test (frame-pointer-based address).  */
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
/* mem <- reg store test (the SET_SRC assignment is elided in this view).  */
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
/* mem1 <- reg store test (frame-pointer-based address).  */
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
300 /* This is run at the start of compiling a function. */
/* NOTE(review): the signature and surrounding braces are elided in this
   excerpt.  Allocates the per-function expr_status and resets the
   expansion-state counters to their initial zero values.  */
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
312 apply_args_value = 0;
318 struct expr_status *p;
/* Mark the rtx roots held in the per-function expr_status so the
   garbage collector does not reclaim them.  (Function header and any
   early-return check are elided in this excerpt.)  */
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
336 /* Small sanity check that the queue is empty at the end of a function. */
/* NOTE(review): the body is elided in this excerpt; presumably it aborts
   if pending_chain is nonzero -- confirm against the full source.  */
339 finish_expr_for_function ()
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
356 enqueue_insn (var, body)
/* Prepend a fresh QUEUED rtx onto pending_chain; the QUEUED_INSN and
   QUEUED_COPY slots start out empty (NULL_RTX) and are filled in later
   by emit_queue / protect_from_queue.  */
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
380 protect_from_queue (x, modify)
384 register RTX_CODE code = GET_CODE (x);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
405 MEM_COPY_ATTRIBUTES (new, x);
/* Copy the memory contents into a pseudo before the queued increment
   happens, so the pre-increment value is preserved.  */
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
/* (the code == MEM test for this branch is elided in this excerpt)  */
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
427 else if (code == PLUS || code == MULT)
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
/* From here on X itself is a QUEUED (the dispatch that establishes this
   is elided in this excerpt).  */
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
452 return QUEUED_COPY (x);
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
464 register enum rtx_code code = GET_CODE (x);
/* NOTE(review): the switch/case structure is elided in this excerpt;
   the visible returns handle the one-operand (e.g. MEM) and two-operand
   (e.g. PLUS/MULT) recursions respectively.  */
470 return queued_subexp_p (XEXP (x, 0));
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
481 /* Perform all the pending incrementations. */
/* Pop each QUEUED entry off pending_chain, emit its body, and record the
   emitted insn in QUEUED_INSN so protect_from_queue can tell the
   increment has happened.  */
487 while ((p = pending_chain))
489 rtx body = QUEUED_BODY (p);
/* For a SEQUENCE, record its first insn as the queued insn.  */
491 if (GET_CODE (body) == SEQUENCE)
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
/* NOTE(review): this function is very heavily elided in this excerpt --
   local declarations, braces, #endif lines and many statements are
   missing.  Comments added below describe only the visible logic.  */
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
525 if (to_real != from_real)
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Same-mode (or constant) case: a plain move suffices.  */
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
544 emit_move_insn (to, from);
/* Vector modes: only same-bitsize reinterpretation via SUBREG is
   handled here.  */
548 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
550 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
553 if (VECTOR_MODE_P (to_mode))
554 from = gen_rtx_SUBREG (to_mode, from, 0);
556 to = gen_rtx_SUBREG (from_mode, to, 0);
558 emit_move_insn (to, from);
562 if (to_real != from_real)
/* Floating-point widening: try a direct extend insn first.  */
569 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
571 /* Try converting directly if the insn is supported. */
572 if ((code = can_extend_p (to_mode, from_mode, 0))
575 emit_unop_insn (code, to, from, UNKNOWN);
/* Floating-point narrowing: one conditionally-compiled case per
   (from_mode, to_mode) pair for which the target provides a special
   truncate insn.  */
580 #ifdef HAVE_trunchfqf2
581 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
587 #ifdef HAVE_trunctqfqf2
588 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
590 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
594 #ifdef HAVE_truncsfqf2
595 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
597 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
601 #ifdef HAVE_truncdfqf2
602 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
608 #ifdef HAVE_truncxfqf2
609 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
615 #ifdef HAVE_trunctfqf2
616 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
623 #ifdef HAVE_trunctqfhf2
624 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
626 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
659 #ifdef HAVE_truncsftqf2
660 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
662 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
666 #ifdef HAVE_truncdftqf2
667 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
669 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
673 #ifdef HAVE_truncxftqf2
674 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
676 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
680 #ifdef HAVE_trunctftqf2
681 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
683 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
688 #ifdef HAVE_truncdfsf2
689 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
691 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
695 #ifdef HAVE_truncxfsf2
696 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
698 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
702 #ifdef HAVE_trunctfsf2
703 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
705 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
709 #ifdef HAVE_truncxfdf2
710 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
712 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
716 #ifdef HAVE_trunctfdf2
717 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
719 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No special insn available: fall back to a library call.  The switch
   selecting the libcall by mode pair is elided; only the assignments
   remain visible.  */
731 libcall = extendsfdf2_libfunc;
735 libcall = extendsfxf2_libfunc;
739 libcall = extendsftf2_libfunc;
751 libcall = truncdfsf2_libfunc;
755 libcall = extenddfxf2_libfunc;
759 libcall = extenddftf2_libfunc;
771 libcall = truncxfsf2_libfunc;
775 libcall = truncxfdf2_libfunc;
787 libcall = trunctfsf2_libfunc;
791 libcall = trunctfdf2_libfunc;
803 if (libcall == (rtx) 0)
804 /* This conversion is not implemented yet. */
808 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
810 insns = get_insns ();
812 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
817 /* Now both modes are integers. */
819 /* Handle expanding beyond a word. */
820 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
821 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
828 enum machine_mode lowpart_mode;
829 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
831 /* Try converting directly if the insn is supported. */
832 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
835 /* If FROM is a SUBREG, put it into a register. Do this
836 so that we always generate the same set of insns for
837 better cse'ing; if an intermediate assignment occurred,
838 we won't be doing the operation directly on the SUBREG. */
839 if (optimize > 0 && GET_CODE (from) == SUBREG)
840 from = force_reg (from_mode, from);
841 emit_unop_insn (code, to, from, equiv_code);
844 /* Next, try converting via full word. */
845 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
846 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
847 != CODE_FOR_nothing))
849 if (GET_CODE (to) == REG)
850 emit_insn (gen_rtx_CLOBBER (VOIDmode, to))
851 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
852 emit_unop_insn (code, to,
853 gen_lowpart (word_mode, to), equiv_code);
857 /* No special multiword conversion insn; do it by hand. */
860 /* Since we will turn this into a no conflict block, we must ensure
861 that the source does not overlap the target. */
863 if (reg_overlap_mentioned_p (to, from))
864 from = force_reg (from_mode, from);
866 /* Get a copy of FROM widened to a word, if necessary. */
867 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
868 lowpart_mode = word_mode;
870 lowpart_mode = from_mode;
872 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
874 lowpart = gen_lowpart (lowpart_mode, to);
875 emit_move_insn (lowpart, lowfrom);
877 /* Compute the value to put in each remaining word. */
879 fill_value = const0_rtx;
/* Signed case: if the target has an slt insn with STORE_FLAG_VALUE == -1,
   use it to compute the fill word directly (the enclosing condition is
   partially elided here).  */
884 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
885 && STORE_FLAG_VALUE == -1)
887 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
889 fill_value = gen_reg_rtx (word_mode);
890 emit_insn (gen_slt (fill_value));
/* Otherwise derive the fill word by an arithmetic right shift of the
   low part by its width minus one (sign replication).  */
896 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
897 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
899 fill_value = convert_to_mode (word_mode, fill_value, 1);
903 /* Fill the remaining words. */
904 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
906 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
907 rtx subword = operand_subword (to, index, 1, to_mode);
912 if (fill_value != subword)
913 emit_move_insn (subword, fill_value);
916 insns = get_insns ();
919 emit_no_conflict_block (insns, to, from, NULL_RTX,
920 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
924 /* Truncating multi-word to a word or less. */
925 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
926 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
928 if (!((GET_CODE (from) == MEM
929 && ! MEM_VOLATILE_P (from)
930 && direct_load[(int) to_mode]
931 && ! mode_dependent_address_p (XEXP (from, 0)))
932 || GET_CODE (from) == REG
933 || GET_CODE (from) == SUBREG))
934 from = force_reg (from_mode, from);
935 convert_move (to, gen_lowpart (word_mode, from), 0);
939 /* Handle pointer conversion. */ /* SPEE 900220. */
940 if (to_mode == PQImode)
942 if (from_mode != QImode)
943 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_truncqipqi2
946 if (HAVE_truncqipqi2)
948 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
951 #endif /* HAVE_truncqipqi2 */
955 if (from_mode == PQImode)
957 if (to_mode != QImode)
959 from = convert_to_mode (QImode, from, unsignedp);
964 #ifdef HAVE_extendpqiqi2
965 if (HAVE_extendpqiqi2)
967 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
970 #endif /* HAVE_extendpqiqi2 */
975 if (to_mode == PSImode)
977 if (from_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_truncsipsi2
981 if (HAVE_truncsipsi2)
983 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
986 #endif /* HAVE_truncsipsi2 */
990 if (from_mode == PSImode)
992 if (to_mode != SImode)
994 from = convert_to_mode (SImode, from, unsignedp);
999 #ifdef HAVE_extendpsisi2
1000 if (! unsignedp && HAVE_extendpsisi2)
1002 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1005 #endif /* HAVE_extendpsisi2 */
1006 #ifdef HAVE_zero_extendpsisi2
1007 if (unsignedp && HAVE_zero_extendpsisi2)
1009 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1012 #endif /* HAVE_zero_extendpsisi2 */
1017 if (to_mode == PDImode)
1019 if (from_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1022 #ifdef HAVE_truncdipdi2
1023 if (HAVE_truncdipdi2)
1025 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1028 #endif /* HAVE_truncdipdi2 */
1032 if (from_mode == PDImode)
1034 if (to_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1041 #ifdef HAVE_extendpdidi2
1042 if (HAVE_extendpdidi2)
1044 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1047 #endif /* HAVE_extendpdidi2 */
1052 /* Now follow all the conversions between integers
1053 no more than a word long. */
1055 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1056 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1057 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1058 GET_MODE_BITSIZE (from_mode)))
1060 if (!((GET_CODE (from) == MEM
1061 && ! MEM_VOLATILE_P (from)
1062 && direct_load[(int) to_mode]
1063 && ! mode_dependent_address_p (XEXP (from, 0)))
1064 || GET_CODE (from) == REG
1065 || GET_CODE (from) == SUBREG))
1066 from = force_reg (from_mode, from);
1067 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1068 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1069 from = copy_to_reg (from);
1070 emit_move_insn (to, gen_lowpart (to_mode, from));
1074 /* Handle extension. */
1075 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1077 /* Convert directly if that works. */
1078 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1079 != CODE_FOR_nothing)
1081 emit_unop_insn (code, to, from, equiv_code);
1086 enum machine_mode intermediate;
1090 /* Search for a mode to convert via. */
1091 for (intermediate = from_mode; intermediate != VOIDmode;
1092 intermediate = GET_MODE_WIDER_MODE (intermediate))
1093 if (((can_extend_p (to_mode, intermediate, unsignedp)
1094 != CODE_FOR_nothing)
1095 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1096 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097 GET_MODE_BITSIZE (intermediate))))
1098 && (can_extend_p (intermediate, from_mode, unsignedp)
1099 != CODE_FOR_nothing))
1101 convert_move (to, convert_to_mode (intermediate, from,
1102 unsignedp), unsignedp);
1106 /* No suitable intermediate mode.
1107 Generate what we need with shifts. */
1108 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1109 - GET_MODE_BITSIZE (from_mode), 0);
1110 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1111 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1113 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1116 emit_move_insn (to, tmp);
1121 /* Support special truncate insns for certain modes. */
1123 if (from_mode == DImode && to_mode == SImode)
1125 #ifdef HAVE_truncdisi2
1126 if (HAVE_truncdisi2)
1128 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 if (from_mode == DImode && to_mode == HImode)
1138 #ifdef HAVE_truncdihi2
1139 if (HAVE_truncdihi2)
1141 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 if (from_mode == DImode && to_mode == QImode)
1151 #ifdef HAVE_truncdiqi2
1152 if (HAVE_truncdiqi2)
1154 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 if (from_mode == SImode && to_mode == HImode)
1164 #ifdef HAVE_truncsihi2
1165 if (HAVE_truncsihi2)
1167 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 if (from_mode == SImode && to_mode == QImode)
1177 #ifdef HAVE_truncsiqi2
1178 if (HAVE_truncsiqi2)
1180 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 if (from_mode == HImode && to_mode == QImode)
1190 #ifdef HAVE_trunchiqi2
1191 if (HAVE_trunchiqi2)
1193 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 if (from_mode == TImode && to_mode == DImode)
1203 #ifdef HAVE_trunctidi2
1204 if (HAVE_trunctidi2)
1206 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 if (from_mode == TImode && to_mode == SImode)
1216 #ifdef HAVE_trunctisi2
1217 if (HAVE_trunctisi2)
1219 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 if (from_mode == TImode && to_mode == HImode)
1229 #ifdef HAVE_trunctihi2
1230 if (HAVE_trunctihi2)
1232 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 if (from_mode == TImode && to_mode == QImode)
1242 #ifdef HAVE_trunctiqi2
1243 if (HAVE_trunctiqi2)
1245 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1253 /* Handle truncation of volatile memrefs, and so on;
1254 the things that couldn't be truncated directly,
1255 and for which there was no special instruction. */
1256 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1258 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1259 emit_move_insn (to, temp);
1263 /* Mode combination is not recognized. */
1267 /* Return an rtx for a value that would result
1268 from converting X to mode MODE.
1269 Both X and MODE may be floating, or both integer.
1270 UNSIGNEDP is nonzero if X is an unsigned value.
1271 This can be done by referring to a part of X in place
1272 or by copying to a new temporary with conversion.
1274 This function *must not* call protect_from_queue
1275 except when putting X into an insn (in which case convert_move does it). */
1278 convert_to_mode (mode, x, unsignedp)
1279 enum machine_mode mode;
/* Delegate to convert_modes with an unknown (VOIDmode) old mode; X's own
   mode, if any, is consulted there.  */
1283 return convert_modes (mode, VOIDmode, x, unsignedp);
1286 /* Return an rtx for a value that would result
1287 from converting X from mode OLDMODE to mode MODE.
1288 Both modes may be floating, or both integer.
1289 UNSIGNEDP is nonzero if X is an unsigned value.
1291 This can be done by referring to a part of X in place
1292 or by copying to a new temporary with conversion.
1294 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1296 This function *must not* call protect_from_queue
1297 except when putting X into an insn (in which case convert_move does it). */
1300 convert_modes (mode, oldmode, x, unsignedp)
1301 enum machine_mode mode, oldmode;
/* NOTE(review): locals and some statements of this function are elided
   in this excerpt.  */
1307 /* If FROM is a SUBREG that indicates that we have already done at least
1308 the required extension, strip it. */
1310 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1311 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1312 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1313 x = gen_lowpart (mode, x);
/* Prefer X's actual mode over the caller-supplied OLDMODE.  */
1315 if (GET_MODE (x) != VOIDmode)
1316 oldmode = GET_MODE (x);
1318 if (mode == oldmode)
1321 /* There is one case that we must handle specially: If we are converting
1322 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1323 we are to interpret the constant as unsigned, gen_lowpart will do
1324 the wrong if the constant appears negative. What we want to do is
1325 make the high-order word of the constant zero, not all ones. */
1327 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1328 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1329 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1331 HOST_WIDE_INT val = INTVAL (x);
1333 if (oldmode != VOIDmode
1334 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1336 int width = GET_MODE_BITSIZE (oldmode);
1338 /* We need to zero extend VAL. */
1339 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* High word explicitly zero: build the double-word constant.  */
1342 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1345 /* We can do this with a gen_lowpart if both desired and current modes
1346 are integer, and this is either a constant integer, a register, or a
1347 non-volatile MEM. Except for the constant case where MODE is no
1348 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1350 if ((GET_CODE (x) == CONST_INT
1351 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1352 || (GET_MODE_CLASS (mode) == MODE_INT
1353 && GET_MODE_CLASS (oldmode) == MODE_INT
1354 && (GET_CODE (x) == CONST_DOUBLE
1355 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1356 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1357 && direct_load[(int) mode])
1358 || (GET_CODE (x) == REG
1359 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1360 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1362 /* ?? If we don't know OLDMODE, we have to assume here that
1363 X does not need sign- or zero-extension. This may not be
1364 the case, but it's the best we can do. */
1365 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1366 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1368 HOST_WIDE_INT val = INTVAL (x);
1369 int width = GET_MODE_BITSIZE (oldmode);
1371 /* We must sign or zero-extend in this case. Start by
1372 zero-extending, then sign extend if we need to. */
1373 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* (the "if (! unsignedp" guard for the sign-extension below is elided
   in this excerpt)  */
1375 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1376 val |= (HOST_WIDE_INT) (-1) << width;
1378 return GEN_INT (val);
1381 return gen_lowpart (mode, x);
/* General case: convert through a fresh pseudo register.  */
1384 temp = gen_reg_rtx (mode);
1385 convert_move (temp, x, unsignedp);
1389 /* This macro is used to determine what the largest unit size that
1390 move_by_pieces can use is. */
1392 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1393 move efficiently, as opposed to MOVE_MAX which is the maximum
1394 number of bytes we can move with a single instruction. */
1396 #ifndef MOVE_MAX_PIECES
1397 #define MOVE_MAX_PIECES MOVE_MAX
1400 /* Generate several move instructions to copy LEN bytes
1401 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1402 The caller must pass FROM and TO
1403 through protect_from_queue before calling.
1404 ALIGN is maximum alignment we can assume. */
1407 move_by_pieces (to, from, len, align)
1409 unsigned HOST_WIDE_INT len;
1412 struct move_by_pieces data;
/* Raw address expressions of both blocks; examined below to detect
   auto-increment addressing already present in the MEMs.  */
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 unsigned int max_size = MOVE_MAX_PIECES + 1;
1415 enum machine_mode mode = VOIDmode, tmode;
1416 enum insn_code icode;
1419 data.to_addr = to_addr;
1420 data.from_addr = from_addr;
/* Note whether each address is already auto-incrementing.  */
1424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1427 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1428 || GET_CODE (from_addr) == POST_INC
1429 || GET_CODE (from_addr) == POST_DEC);
1431 data.explicit_inc_from = 0;
1432 data.explicit_inc_to = 0;
/* A decrementing destination address means the copy proceeds from
   high addresses down, so start the offset at LEN.  */
1434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1435 if (data.reverse) data.offset = len;
1438 /* If copying requires more than two move insns,
1439 copy addresses to registers (to make displacements shorter)
1440 and use post-increment if available. */
1441 if (!(data.autinc_from && data.autinc_to)
1442 && move_by_pieces_ninsns (len, align) > 2)
1444 /* Find the mode of the largest move... */
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
/* explicit_inc_* = -1/+1 records that we chose pre-decrement /
   post-increment addressing and must emit the adjusts ourselves
   (see move_by_pieces_1).  */
1450 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1452 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1453 data.autinc_from = 1;
1454 data.explicit_inc_from = -1;
1456 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 data.autinc_from = 1;
1460 data.explicit_inc_from = 1;
1462 if (!data.autinc_from && CONSTANT_P (from_addr))
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1466 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len))
1468 data.explicit_inc_to = -1;
1470 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1472 data.to_addr = copy_addr_to_reg (to_addr);
1474 data.explicit_inc_to = 1;
1476 if (!data.autinc_to && CONSTANT_P (to_addr))
1477 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned accesses are cheap, or the claimed alignment is
   implausibly large, just treat the blocks as fully aligned.  */
1480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1481 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1482 align = MOVE_MAX * BITS_PER_UNIT;
1484 /* First move what we can in the largest integer mode, then go to
1485 successively smaller modes. */
1487 while (max_size > 1)
/* Pick the widest integer mode narrower than MAX_SIZE...  */
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1491 if (GET_MODE_SIZE (tmode) < max_size)
1494 if (mode == VOIDmode)
/* ...and use it if the target has a move pattern for it and the
   alignment permits.  */
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1499 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501 max_size = GET_MODE_SIZE (mode);
1504 /* The code above should have handled everything. */
1509 /* Return number of insns required to move L bytes by pieces.
1510 ALIGN (in bytes) is maximum alignment we can assume. */
1512 static unsigned HOST_WIDE_INT
1513 move_by_pieces_ninsns (l, align)
1514 unsigned HOST_WIDE_INT l;
1517 unsigned HOST_WIDE_INT n_insns = 0;
1518 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
/* Same alignment normalization as move_by_pieces, so the insn count
   computed here matches what that function will actually emit.  */
1520 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1521 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1522 align = MOVE_MAX * BITS_PER_UNIT;
1524 while (max_size > 1)
1526 enum machine_mode mode = VOIDmode, tmode;
1527 enum insn_code icode;
/* Widest integer mode narrower than MAX_SIZE.  */
1529 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1530 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1531 if (GET_MODE_SIZE (tmode) < max_size)
1534 if (mode == VOIDmode)
1537 icode = mov_optab->handlers[(int) mode].insn_code;
1538 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
/* Count whole moves in this mode; the remainder is handled by
   the narrower modes on later iterations.  */
1539 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1541 max_size = GET_MODE_SIZE (mode);
1549 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1550 with move instructions for mode MODE. GENFUN is the gen_... function
1551 to make a move insn for that mode. DATA has all the other info. */
1554 move_by_pieces_1 (genfun, mode, data)
1555 rtx (*genfun) PARAMS ((rtx, ...));
1556 enum machine_mode mode;
1557 struct move_by_pieces *data;
1559 unsigned int size = GET_MODE_SIZE (mode);
/* Emit one MODE-sized move per iteration until fewer than SIZE
   bytes remain; those are left for a narrower mode.  */
1562 while (data->len >= size)
1565 data->offset -= size;
/* With auto-increment addressing the MEM uses the address register
   directly; otherwise address via a constant displacement.  */
1567 if (data->autinc_to)
1569 to1 = gen_rtx_MEM (mode, data->to_addr);
1570 MEM_COPY_ATTRIBUTES (to1, data->to);
1573 to1 = change_address (data->to, mode,
1574 plus_constant (data->to_addr, data->offset));
1576 if (data->autinc_from)
1578 from1 = gen_rtx_MEM (mode, data->from_addr);
1579 MEM_COPY_ATTRIBUTES (from1, data->from);
1582 from1 = change_address (data->from, mode,
1583 plus_constant (data->from_addr, data->offset));
/* explicit_inc_* < 0 means move_by_pieces chose pre-decrement:
   adjust the address register before the move.  */
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1586 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1587 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1588 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1590 emit_insn ((*genfun) (to1, from1));
/* explicit_inc_* > 0 means post-increment: adjust afterwards.  */
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1594 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1597 if (! data->reverse)
1598 data->offset += size;
1604 /* Emit code to move a block Y to a block X.
1605 This may be done with string-move instructions,
1606 with multiple scalar move instructions, or with a library call.
1608 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1610 SIZE is an rtx that says how long they are.
1611 ALIGN is the maximum alignment we can assume they have.
1613 Return the address of the new block, if memcpy is called and returns it,
1617 emit_block_move (x, y, size, align)
1623 #ifdef TARGET_MEM_FUNCTIONS
1625 tree call_expr, arg_list;
/* Both operands must be BLKmode MEMs.  */
1628 if (GET_MODE (x) != BLKmode)
1631 if (GET_MODE (y) != BLKmode)
1634 x = protect_from_queue (x, 1);
1635 y = protect_from_queue (y, 0);
1636 size = protect_from_queue (size, 0);
1638 if (GET_CODE (x) != MEM)
1640 if (GET_CODE (y) != MEM)
/* Strategy 1: small constant-size copy — expand inline with
   scalar moves.  */
1645 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1646 move_by_pieces (x, y, INTVAL (size), align);
/* Strategy 2: a target movstr pattern, narrowest mode first.  */
1649 /* Try the most limited insn first, because there's no point
1650 including more than one in the machine description unless
1651 the more limited one has some advantage. */
1653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1654 enum machine_mode mode;
1656 /* Since this is a move insn, we don't care about volatility. */
1659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1660 mode = GET_MODE_WIDER_MODE (mode))
1662 enum insn_code code = movstr_optab[(int) mode];
1663 insn_operand_predicate_fn pred;
1665 if (code != CODE_FOR_nothing
1666 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1667 here because if SIZE is less than the mode mask, as it is
1668 returned by the macro, it will definitely be less than the
1669 actual mode mask. */
1670 && ((GET_CODE (size) == CONST_INT
1671 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1672 <= (GET_MODE_MASK (mode) >> 1)))
1673 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1674 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1675 || (*pred) (x, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1677 || (*pred) (y, BLKmode))
1678 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1679 || (*pred) (opalign, VOIDmode)))
1682 rtx last = get_last_insn ();
1685 op2 = convert_to_mode (mode, size, 1);
1686 pred = insn_data[(int) code].operand[2].predicate;
1687 if (pred != 0 && ! (*pred) (op2, mode))
1688 op2 = copy_to_mode_reg (mode, op2);
1690 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed; discard any insns it emitted and
   fall back to the next strategy.  */
1698 delete_insns_since (last);
/* Strategy 3: call memcpy (or bcopy without TARGET_MEM_FUNCTIONS).  */
1704 /* X, Y, or SIZE may have been passed through protect_from_queue.
1706 It is unsafe to save the value generated by protect_from_queue
1707 and reuse it later. Consider what happens if emit_queue is
1708 called before the return value from protect_from_queue is used.
1710 Expansion of the CALL_EXPR below will call emit_queue before
1711 we are finished emitting RTL for argument setup. So if we are
1712 not careful we could get the wrong value for an argument.
1714 To avoid this problem we go ahead and emit code to copy X, Y &
1715 SIZE into new pseudos. We can then place those new pseudos
1716 into an RTL_EXPR and use them later, even after a call to
1719 Note this is not strictly needed for library calls since they
1720 do not call emit_queue before loading their arguments. However,
1721 we may need to have library calls call emit_queue in the future
1722 since failing to do so could cause problems for targets which
1723 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1724 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1725 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1727 #ifdef TARGET_MEM_FUNCTIONS
1728 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1730 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1731 TREE_UNSIGNED (integer_type_node));
1732 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1735 #ifdef TARGET_MEM_FUNCTIONS
1736 /* It is incorrect to use the libcall calling conventions to call
1737 memcpy in this context.
1739 This could be a user call to memcpy and the user may wish to
1740 examine the return value from memcpy.
1742 For targets where libcalls and normal calls have different conventions
1743 for returning pointers, we could end up generating incorrect code.
1745 So instead of using a libcall sequence we build up a suitable
1746 CALL_EXPR and expand the call in the normal fashion. */
1747 if (fn == NULL_TREE)
1751 /* This was copied from except.c, I don't know if all this is
1752 necessary in this context or not. */
1753 fn = get_identifier ("memcpy");
1754 fntype = build_pointer_type (void_type_node);
1755 fntype = build_function_type (fntype, NULL_TREE);
1756 fn = build_decl (FUNCTION_DECL, fn, fntype);
1757 ggc_add_tree_root (&fn, 1);
1758 DECL_EXTERNAL (fn) = 1;
1759 TREE_PUBLIC (fn) = 1;
1760 DECL_ARTIFICIAL (fn) = 1;
1761 make_decl_rtl (fn, NULL_PTR);
1762 assemble_external (fn);
1765 /* We need to make an argument list for the function call.
1767 memcpy has three arguments, the first two are void * addresses and
1768 the last is a size_t byte count for the copy. */
1770 = build_tree_list (NULL_TREE,
1771 make_tree (build_pointer_type (void_type_node), x));
1772 TREE_CHAIN (arg_list)
1773 = build_tree_list (NULL_TREE,
1774 make_tree (build_pointer_type (void_type_node), y));
1775 TREE_CHAIN (TREE_CHAIN (arg_list))
1776 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1777 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1779 /* Now we have to build up the CALL_EXPR itself. */
1780 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1781 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1782 call_expr, arg_list, NULL_TREE);
1783 TREE_SIDE_EFFECTS (call_expr) = 1;
1785 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* No TARGET_MEM_FUNCTIONS: use the bcopy libcall.  Note bcopy's
   argument order is (src, dst) — hence Y before X here.  */
1787 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1788 VOIDmode, 3, y, Pmode, x, Pmode,
1789 convert_to_mode (TYPE_MODE (integer_type_node), size,
1790 TREE_UNSIGNED (integer_type_node)),
1791 TYPE_MODE (integer_type_node));
1798 /* Copy all or part of a value X into registers starting at REGNO.
1799 The number of registers to be filled is NREGS. */
1802 move_block_to_reg (regno, x, nregs, mode)
1806 enum machine_mode mode;
1809 #ifdef HAVE_load_multiple
/* Constants the target cannot use directly must come from memory.  */
1817 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1818 x = validize_mem (force_const_mem (mode, x));
1820 /* See if the machine can do this with a load multiple insn. */
1821 #ifdef HAVE_load_multiple
1822 if (HAVE_load_multiple)
1824 last = get_last_insn ();
1825 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
/* Pattern failed; discard its insns and fall back below.  */
1833 delete_insns_since (last);
/* Fallback: one word-mode move per register.  */
1837 for (i = 0; i < nregs; i++)
1838 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1839 operand_subword_force (x, i, mode));
1842 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1843 The number of registers to be filled is NREGS. SIZE indicates the number
1844 of bytes in the object X. */
1847 move_block_from_reg (regno, x, nregs, size)
1854 #ifdef HAVE_store_multiple
1858 enum machine_mode mode;
1863 /* If SIZE is that of a mode no bigger than a word, just use that
1864 mode's store operation. */
1865 if (size <= UNITS_PER_WORD
1866 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1868 emit_move_insn (change_address (x, mode, NULL),
1869 gen_rtx_REG (mode, regno));
1873 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1874 to the left before storing to memory. Note that the previous test
1875 doesn't handle all cases (e.g. SIZE == 3). */
1876 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1878 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Left-justify the value by shifting out the unused low bytes.  */
1884 shift = expand_shift (LSHIFT_EXPR, word_mode,
1885 gen_rtx_REG (word_mode, regno),
1886 build_int_2 ((UNITS_PER_WORD - size)
1887 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1888 emit_move_insn (tem, shift);
1892 /* See if the machine can do this with a store multiple insn. */
1893 #ifdef HAVE_store_multiple
1894 if (HAVE_store_multiple)
1896 last = get_last_insn ();
1897 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
/* Pattern failed; discard its insns and fall back below.  */
1905 delete_insns_since (last);
/* Fallback: store each register to its word of X individually.  */
1909 for (i = 0; i < nregs; i++)
1911 rtx tem = operand_subword (x, i, 1, BLKmode);
1916 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1920 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1921 registers represented by a PARALLEL. SSIZE represents the total size of
1922 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1924 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1925 the balance will be in what would be the low-order memory addresses, i.e.
1926 left justified for big endian, right justified for little endian. This
1927 happens to be true for the targets currently using this support. If this
1928 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1932 emit_group_load (dst, orig_src, ssize, align)
1940 if (GET_CODE (dst) != PARALLEL)
1943 /* Check for a NULL entry, used to indicate that the parameter goes
1944 both on the stack and in registers. */
1945 if (XEXP (XVECEXP (dst, 0, 0), 0))
/* One temporary per destination piece; filled in the loop below and
   copied to the hard regs only at the very end.  */
1950 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1952 /* If we won't be loading directly from memory, protect the real source
1953 from strange tricks we might play. */
1955 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1957 if (GET_MODE (src) == VOIDmode)
1958 src = gen_reg_rtx (GET_MODE (dst));
1960 src = gen_reg_rtx (GET_MODE (orig_src));
1961 emit_move_insn (src, orig_src);
1964 /* Process the pieces. */
1965 for (i = start; i < XVECLEN (dst, 0); i++)
1967 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1968 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1969 unsigned int bytelen = GET_MODE_SIZE (mode);
1972 /* Handle trailing fragments that run over the size of the struct. */
1973 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1975 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1976 bytelen = ssize - bytepos;
1981 /* Optimize the access just a bit. */
1982 if (GET_CODE (src) == MEM
1983 && align >= GET_MODE_ALIGNMENT (mode)
1984 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1985 && bytelen == GET_MODE_SIZE (mode))
1987 tmps[i] = gen_reg_rtx (mode);
1988 emit_move_insn (tmps[i],
1989 change_address (src, mode,
1990 plus_constant (XEXP (src, 0),
/* A CONCAT source: pick whichever half the piece lines up with.  */
1993 else if (GET_CODE (src) == CONCAT)
1996 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1997 tmps[i] = XEXP (src, 0);
1998 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1999 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2000 tmps[i] = XEXP (src, 1);
2004 else if ((CONSTANT_P (src)
2005 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2006 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
/* General case: extract the piece as a bit field.  */
2009 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2010 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2011 mode, mode, align, ssize);
/* Left-justify a trailing fragment on big-endian targets (see the
   ??? comment above the function).  */
2013 if (BYTES_BIG_ENDIAN && shift)
2014 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2015 tmps[i], 0, OPTAB_WIDEN);
2020 /* Copy the extracted pieces into the proper (probable) hard regs. */
2021 for (i = start; i < XVECLEN (dst, 0); i++)
2022 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2025 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2026 registers represented by a PARALLEL. SSIZE represents the total size of
2027 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2030 emit_group_store (orig_dst, src, ssize, align)
2038 if (GET_CODE (src) != PARALLEL)
2041 /* Check for a NULL entry, used to indicate that the parameter goes
2042 both on the stack and in registers. */
2043 if (XEXP (XVECEXP (src, 0, 0), 0))
2048 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2050 /* Copy the (probable) hard regs into pseudos. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2053 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2054 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2055 emit_move_insn (tmps[i], reg);
2059 /* If we won't be storing directly into memory, protect the real destination
2060 from strange tricks we might play. */
2062 if (GET_CODE (dst) == PARALLEL)
2066 /* We can get a PARALLEL dst if there is a conditional expression in
2067 a return statement. In that case, the dst and src are the same,
2068 so no action is necessary. */
2069 if (rtx_equal_p (dst, src))
2072 /* It is unclear if we can ever reach here, but we may as well handle
2073 it. Allocate a temporary, and split this into a store/load to/from
2076 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2077 emit_group_store (temp, src, ssize, align);
2078 emit_group_load (dst, temp, ssize, align);
2081 else if (GET_CODE (dst) != MEM)
2083 dst = gen_reg_rtx (GET_MODE (orig_dst));
2084 /* Make life a bit easier for combine. */
2085 emit_move_insn (dst, const0_rtx);
2088 /* Process the pieces. */
2089 for (i = start; i < XVECLEN (src, 0); i++)
2091 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2092 enum machine_mode mode = GET_MODE (tmps[i]);
2093 unsigned int bytelen = GET_MODE_SIZE (mode);
2095 /* Handle trailing fragments that run over the size of the struct. */
2096 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2098 if (BYTES_BIG_ENDIAN)
/* Undo the left-justification done by emit_group_load.  */
2100 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2101 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2102 tmps[i], 0, OPTAB_WIDEN);
2104 bytelen = ssize - bytepos;
2107 /* Optimize the access just a bit. */
2108 if (GET_CODE (dst) == MEM
2109 && align >= GET_MODE_ALIGNMENT (mode)
2110 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2111 && bytelen == GET_MODE_SIZE (mode))
2112 emit_move_insn (change_address (dst, mode,
2113 plus_constant (XEXP (dst, 0),
/* General case: store the piece as a bit field.  */
2117 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 mode, tmps[i], align, ssize);
2123 /* Copy from the pseudo into the (probable) hard reg. */
2124 if (GET_CODE (dst) == REG)
2125 emit_move_insn (orig_dst, dst);
2128 /* Generate code to copy a BLKmode object of TYPE out of a
2129 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2130 is null, a stack temporary is created. TGTBLK is returned.
2132 The primary purpose of this routine is to handle functions
2133 that return BLKmode structures in registers. Some machines
2134 (the PA for example) want to return all small structures
2135 in registers regardless of the structure's alignment. */
2138 copy_blkmode_from_reg (tgtblk, srcreg, type)
2143 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2144 rtx src = NULL, dst = NULL;
/* Copy unit: at most one word per bit-field transfer.  */
2145 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2146 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
/* No target block supplied: build a const-qualified stack temp.  */
2150 tgtblk = assign_temp (build_qualified_type (type,
2152 | TYPE_QUAL_CONST)),
2154 preserve_temp_slots (tgtblk);
2157 /* This code assumes srcreg is at least a full word. If it isn't,
2158 copy it into a new pseudo which is a full word. */
2159 if (GET_MODE (srcreg) != BLKmode
2160 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2161 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2163 /* Structures whose size is not a multiple of a word are aligned
2164 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2165 machine, this means we must skip the empty high order bytes when
2166 calculating the bit offset. */
2167 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2168 big_endian_correction
2169 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2171 /* Copy the structure BITSIZE bits at a time.
2173 We could probably emit more efficient code for machines which do not use
2174 strict alignment, but it doesn't seem worth the effort at the current
2176 for (bitpos = 0, xbitpos = big_endian_correction;
2177 bitpos < bytes * BITS_PER_UNIT;
2178 bitpos += bitsize, xbitpos += bitsize)
2180 /* We need a new source operand each time xbitpos is on a
2181 word boundary and when xbitpos == big_endian_correction
2182 (the first time through). */
2183 if (xbitpos % BITS_PER_WORD == 0
2184 || xbitpos == big_endian_correction)
2185 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2187 /* We need a new destination operand each time bitpos is on
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 bitsize, BITS_PER_WORD),
2199 bitsize, BITS_PER_WORD);
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2209 use_reg (call_fusage, reg)
2210 rtx *call_fusage, reg;
/* Only hard registers may appear in CALL_INSN_FUNCTION_USAGE.  */
2212 if (GET_CODE (reg) != REG
2213 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend (USE reg) to the existing list.  */
2217 = gen_rtx_EXPR_LIST (VOIDmode,
2218 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2221 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2222 starting at REGNO. All of these registers must be hard registers. */
2225 use_regs (call_fusage, regno, nregs)
/* The whole range must lie within the hard registers.  */
2232 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2235 for (i = 0; i < nregs; i++)
2236 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2239 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2240 PARALLEL REGS. This is for calls that pass values in multiple
2241 non-contiguous locations. The Irix 6 ABI has examples of this. */
2244 use_group_regs (call_fusage, regs)
2250 for (i = 0; i < XVECLEN (regs, 0); i++)
2252 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2254 /* A NULL entry means the parameter goes both on the stack and in
2255 registers. This can also be a MEM for targets that pass values
2256 partially on the stack and partially in registers. */
2257 if (reg != 0 && GET_CODE (reg) == REG)
2258 use_reg (call_fusage, reg);
/* Determine whether LEN bytes generated by CONSTFUN can be stored
   to memory using store_by_pieces, i.e. whether every constant it
   produces is legitimate for a direct move in some usable mode.
   CONSTFUNDATA is opaque data passed through to CONSTFUN; ALIGN is
   the maximum alignment we can assume.  */
2264 can_store_by_pieces (len, constfun, constfundata, align)
2265 unsigned HOST_WIDE_INT len;
2266 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
/* If by-pieces expansion would not be used at all, say no.  */
2277 if (! MOVE_BY_PIECES_P (len, align))
2280 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2281 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2282 align = MOVE_MAX * BITS_PER_UNIT;
2284 /* We would first store what we can in the largest integer mode, then go to
2285 successively smaller modes. */
/* Check both store directions, since store_by_pieces may emit the
   pieces in reverse when a decrement addressing mode exists.  */
2288 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2293 max_size = MOVE_MAX_PIECES + 1;
2294 while (max_size > 1)
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2301 if (mode == VOIDmode)
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode))
2308 unsigned int size = GET_MODE_SIZE (mode);
/* Ask CONSTFUN for each piece and reject if the target cannot
   use the resulting constant directly.  */
2315 cst = (*constfun) (constfundata, offset, mode);
2316 if (!LEGITIMATE_CONSTANT_P (cst))
2326 max_size = GET_MODE_SIZE (mode);
2329 /* The code above should have handled everything. */
2337 /* Generate several move instructions to store LEN bytes generated by
2338 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2339 pointer which will be passed as argument in every CONSTFUN call.
2340 ALIGN is maximum alignment we can assume. */
2343 store_by_pieces (to, len, constfun, constfundata, align)
2345 unsigned HOST_WIDE_INT len;
2346 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2350 struct store_by_pieces data;
/* Caller should have checked can_store_by_pieces first.  */
2352 if (! MOVE_BY_PIECES_P (len, align))
2354 to = protect_from_queue (to, 1);
/* Package the callback and data; store_by_pieces_1 does the work.  */
2355 data.constfun = constfun;
2356 data.constfundata = constfundata;
2359 store_by_pieces_1 (&data, align);
2362 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2363 rtx with BLKmode). The caller must pass TO through protect_from_queue
2364 before calling. ALIGN is maximum alignment we can assume. */
2367 clear_by_pieces (to, len, align)
2369 unsigned HOST_WIDE_INT len;
2372 struct store_by_pieces data;
/* Clearing is just store_by_pieces with a constant-zero generator.  */
2374 data.constfun = clear_by_pieces_1;
2375 data.constfundata = NULL_PTR;
2378 store_by_pieces_1 (&data, align);
2381 /* Callback routine for clear_by_pieces.
2382 Return const0_rtx unconditionally. */
2385 clear_by_pieces_1 (data, offset, mode)
2386 PTR data ATTRIBUTE_UNUSED;
2387 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2388 enum machine_mode mode ATTRIBUTE_UNUSED;
2393 /* Subroutine of clear_by_pieces and store_by_pieces.
2394 Generate several move instructions to store LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). The caller must pass TO through protect_from_queue
2396 before calling. ALIGN is maximum alignment we can assume. */
2399 store_by_pieces_1 (data, align)
2400 struct store_by_pieces *data;
2403 rtx to_addr = XEXP (data->to, 0);
2404 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2405 enum machine_mode mode = VOIDmode, tmode;
2406 enum insn_code icode;
2409 data->to_addr = to_addr;
/* Note whether the destination address already auto-increments.  */
2411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2414 data->explicit_inc_to = 0;
/* A decrementing address means we store from high offsets down.  */
2416 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2418 data->offset = data->len;
2420 /* If storing requires more than two move insns,
2421 copy addresses to registers (to make displacements shorter)
2422 and use post-increment if available. */
2423 if (!data->autinc_to
2424 && move_by_pieces_ninsns (data->len, align) > 2)
2426 /* Determine the main mode we'll be using. */
2427 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2428 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2429 if (GET_MODE_SIZE (tmode) < max_size)
/* explicit_inc_to = -1/+1 records a pre-decrement / post-increment
   choice whose address adjusts store_by_pieces_2 must emit.  */
2432 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2434 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = -1;
2439 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2440 && ! data->autinc_to)
2442 data->to_addr = copy_addr_to_reg (to_addr);
2443 data->autinc_to = 1;
2444 data->explicit_inc_to = 1;
2447 if ( !data->autinc_to && CONSTANT_P (to_addr))
2448 data->to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap, or the alignment claim is implausibly
   large, treat the block as fully aligned.  */
2451 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2452 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2453 align = MOVE_MAX * BITS_PER_UNIT;
2455 /* First store what we can in the largest integer mode, then go to
2456 successively smaller modes. */
2458 while (max_size > 1)
2460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2462 if (GET_MODE_SIZE (tmode) < max_size)
2465 if (mode == VOIDmode)
2468 icode = mov_optab->handlers[(int) mode].insn_code;
2469 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2470 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2472 max_size = GET_MODE_SIZE (mode);
2475 /* The code above should have handled everything. */
2480 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2481 with move instructions for mode MODE. GENFUN is the gen_... function
2482 to make a move insn for that mode. DATA has all the other info. */
2485 store_by_pieces_2 (genfun, mode, data)
2486 rtx (*genfun) PARAMS ((rtx, ...));
2487 enum machine_mode mode;
2488 struct store_by_pieces *data;
2490 unsigned int size = GET_MODE_SIZE (mode);
/* Emit one MODE-sized store per iteration until fewer than SIZE
   bytes remain; narrower modes handle the rest.  */
2493 while (data->len >= size)
2496 data->offset -= size;
/* Auto-increment addressing uses the address register directly;
   otherwise address via a constant displacement.  */
2498 if (data->autinc_to)
2500 to1 = gen_rtx_MEM (mode, data->to_addr);
2501 MEM_COPY_ATTRIBUTES (to1, data->to);
2504 to1 = change_address (data->to, mode,
2505 plus_constant (data->to_addr, data->offset));
/* Pre-decrement chosen by store_by_pieces_1: adjust first.  */
2507 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2508 emit_insn (gen_add2_insn (data->to_addr,
2509 GEN_INT (-(HOST_WIDE_INT) size)));
/* Get the constant for this piece from the caller's generator.  */
2511 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2512 emit_insn ((*genfun) (to1, cst));
2514 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2515 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2517 if (! data->reverse)
2518 data->offset += size;
2524 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2525 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2527 If we call a function that returns the length of the block, return it. */
2530 clear_storage (object, size, align)
2535 #ifdef TARGET_MEM_FUNCTIONS
2537 tree call_expr, arg_list;
2541 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2542 just move a zero. Otherwise, do this a piece at a time. */
2543 if (GET_MODE (object) != BLKmode
2544 && GET_CODE (size) == CONST_INT
2545 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2546 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2549 object = protect_from_queue (object, 1);
2550 size = protect_from_queue (size, 0);
2552 if (GET_CODE (size) == CONST_INT
2553 && MOVE_BY_PIECES_P (INTVAL (size), align))
2554 clear_by_pieces (object, INTVAL (size), align);
2557 /* Try the most limited insn first, because there's no point
2558 including more than one in the machine description unless
2559 the more limited one has some advantage. */
2561 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2562 enum machine_mode mode;
2564 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2565 mode = GET_MODE_WIDER_MODE (mode))
2567 enum insn_code code = clrstr_optab[(int) mode];
2568 insn_operand_predicate_fn pred;
2570 if (code != CODE_FOR_nothing
2571 /* We don't need MODE to be narrower than
2572 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2573 the mode mask, as it is returned by the macro, it will
2574 definitely be less than the actual mode mask. */
2575 && ((GET_CODE (size) == CONST_INT
2576 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2577 <= (GET_MODE_MASK (mode) >> 1)))
2578 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2579 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2580 || (*pred) (object, BLKmode))
2581 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2582 || (*pred) (opalign, VOIDmode)))
2585 rtx last = get_last_insn ();
2588 op1 = convert_to_mode (mode, size, 1);
2589 pred = insn_data[(int) code].operand[1].predicate;
2590 if (pred != 0 && ! (*pred) (op1, mode))
2591 op1 = copy_to_mode_reg (mode, op1);
2593 pat = GEN_FCN ((int) code) (object, op1, opalign);
2600 delete_insns_since (last);
2604 /* OBJECT or SIZE may have been passed through protect_from_queue.
2606 It is unsafe to save the value generated by protect_from_queue
2607 and reuse it later. Consider what happens if emit_queue is
2608 called before the return value from protect_from_queue is used.
2610 Expansion of the CALL_EXPR below will call emit_queue before
2611 we are finished emitting RTL for argument setup. So if we are
2612 not careful we could get the wrong value for an argument.
2614 To avoid this problem we go ahead and emit code to copy OBJECT
2615 and SIZE into new pseudos. We can then place those new pseudos
2616 into an RTL_EXPR and use them later, even after a call to
2619 Note this is not strictly needed for library calls since they
2620 do not call emit_queue before loading their arguments. However,
2621 we may need to have library calls call emit_queue in the future
2622 since failing to do so could cause problems for targets which
2623 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2624 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2626 #ifdef TARGET_MEM_FUNCTIONS
2627 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2629 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2630 TREE_UNSIGNED (integer_type_node));
2631 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2634 #ifdef TARGET_MEM_FUNCTIONS
2635 /* It is incorrect to use the libcall calling conventions to call
2636 memset in this context.
2638 This could be a user call to memset and the user may wish to
2639 examine the return value from memset.
2641 For targets where libcalls and normal calls have different
2642 conventions for returning pointers, we could end up generating
2645 So instead of using a libcall sequence we build up a suitable
2646 CALL_EXPR and expand the call in the normal fashion. */
2647 if (fn == NULL_TREE)
2651 /* This was copied from except.c, I don't know if all this is
2652 necessary in this context or not. */
2653 fn = get_identifier ("memset");
2654 fntype = build_pointer_type (void_type_node);
2655 fntype = build_function_type (fntype, NULL_TREE);
2656 fn = build_decl (FUNCTION_DECL, fn, fntype);
2657 ggc_add_tree_root (&fn, 1);
2658 DECL_EXTERNAL (fn) = 1;
2659 TREE_PUBLIC (fn) = 1;
2660 DECL_ARTIFICIAL (fn) = 1;
2661 make_decl_rtl (fn, NULL_PTR);
2662 assemble_external (fn);
2665 /* We need to make an argument list for the function call.
2667 memset has three arguments, the first is a void * addresses, the
2668 second a integer with the initialization value, the last is a
2669 size_t byte count for the copy. */
2671 = build_tree_list (NULL_TREE,
2672 make_tree (build_pointer_type (void_type_node),
2674 TREE_CHAIN (arg_list)
2675 = build_tree_list (NULL_TREE,
2676 make_tree (integer_type_node, const0_rtx));
2677 TREE_CHAIN (TREE_CHAIN (arg_list))
2678 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2679 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2681 /* Now we have to build up the CALL_EXPR itself. */
2682 call_expr = build1 (ADDR_EXPR,
2683 build_pointer_type (TREE_TYPE (fn)), fn);
2684 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2685 call_expr, arg_list, NULL_TREE);
2686 TREE_SIDE_EFFECTS (call_expr) = 1;
2688 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2690 emit_library_call (bzero_libfunc, LCT_NORMAL,
2691 VOIDmode, 2, object, Pmode, size,
2692 TYPE_MODE (integer_type_node));
2700 /* Generate code to copy Y into X.
2701 Both Y and X must have the same mode, except that
2702 Y can be a constant with VOIDmode.
2703 This mode cannot be BLKmode; use emit_block_move for that.
2705 Return the last instruction emitted. */
/* NOTE(review): the chunk is elided -- the return type, braces,
   LAST_INSN declaration, the abort arms and the assignment that sets
   Y_CST are not visible here.  */
2708 emit_move_insn (x, y)
2711 enum machine_mode mode = GET_MODE (x);
2712 rtx y_cst = NULL_RTX;
/* Flush pending autoincrements queued on the operands.  */
2715 x = protect_from_queue (x, 1);
2716 y = protect_from_queue (y, 0);
/* Mode mismatch (other than a VOIDmode constant Y) is a caller bug.  */
2718 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2721 /* Never force constant_p_rtx to memory. */
2722 if (GET_CODE (y) == CONSTANT_P_RTX)
/* A constant the target cannot accept directly is spilled to the
   constant pool; Y_CST (set in an elided line) remembers the original
   so a REG_EQUAL note can be attached below.  */
2724 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2727 y = force_const_mem (mode, y);
2730 /* If X or Y are memory references, verify that their addresses are valid
2732 if (GET_CODE (x) == MEM
2733 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2734 && ! push_operand (x, GET_MODE (x)))
2736 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2737 x = change_address (x, VOIDmode, XEXP (x, 0));
2739 if (GET_CODE (y) == MEM
2740 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2742 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2743 y = change_address (y, VOIDmode, XEXP (y, 0));
2745 if (mode == BLKmode)
/* Delegate the actual move to the low-level worker.  */
2748 last_insn = emit_move_insn_1 (x, y);
/* Record the original constant on the move so later passes (cse,
   combine) know the register's value.  */
2750 if (y_cst && GET_CODE (x) == REG)
2751 REG_NOTES (last_insn)
2752 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2757 /* Low level part of emit_move_insn.
2758 Called just like emit_move_insn, but assumes X and Y
2759 are basically valid. */
/* NOTE(review): the chunk is elided -- braces, several declarations
   (LAST_INSN, SEQ, NEED_CLOBBER, INNER, the loop header at 2935) and
   some arms are missing; comments cover visible lines only.  */
2762 emit_move_insn_1 (x, y)
2765 enum machine_mode mode = GET_MODE (x);
2766 enum machine_mode submode;
2767 enum mode_class class = GET_MODE_CLASS (mode);
/* MODE must be a real machine mode.  */
2770 if (mode >= MAX_MACHINE_MODE)
/* Case 1: the target has a movM pattern for this mode -- use it.  */
2773 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2775 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2777 /* Expand complex moves by moving real part and imag part, if possible. */
2778 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2779 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2781 (class == MODE_COMPLEX_INT
2782 ? MODE_INT : MODE_FLOAT),
2784 && (mov_optab->handlers[(int) submode].insn_code
2785 != CODE_FOR_nothing))
2787 /* Don't split destination if it is a stack push. */
2788 int stack = push_operand (x, GET_MODE (x));
2790 /* If this is a stack, push the highpart first, so it
2791 will be in the argument order.
2793 In that case, change_address is used only to convert
2794 the mode, not to change the address. */
2797 /* Note that the real part always precedes the imag part in memory
2798 regardless of machine's endianness. */
2799 #ifdef STACK_GROWS_DOWNWARD
2800 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2801 (gen_rtx_MEM (submode, XEXP (x, 0)),
2802 gen_imagpart (submode, y)));
2803 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2804 (gen_rtx_MEM (submode, XEXP (x, 0)),
2805 gen_realpart (submode, y)));
/* Upward-growing stack: push realpart first instead.  */
2807 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2808 (gen_rtx_MEM (submode, XEXP (x, 0)),
2809 gen_realpart (submode, y)));
2810 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2811 (gen_rtx_MEM (submode, XEXP (x, 0)),
2812 gen_imagpart (submode, y)));
/* Non-push complex move: split into real/imag halves.  */
2817 rtx realpart_x, realpart_y;
2818 rtx imagpart_x, imagpart_y;
2820 /* If this is a complex value with each part being smaller than a
2821 word, the usual calling sequence will likely pack the pieces into
2822 a single register. Unfortunately, SUBREG of hard registers only
2823 deals in terms of words, so we have a problem converting input
2824 arguments to the CONCAT of two registers that is used elsewhere
2825 for complex values. If this is before reload, we can copy it into
2826 memory and reload. FIXME, we should see about using extract and
2827 insert on integer registers, but complex short and complex char
2828 variables should be rarely used. */
2829 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2830 && (reload_in_progress | reload_completed) == 0)
2832 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2833 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2835 if (packed_dest_p || packed_src_p)
2837 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2838 ? MODE_FLOAT : MODE_INT);
2840 enum machine_mode reg_mode
2841 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2843 if (reg_mode != BLKmode)
/* Bounce the value through a stack slot viewed in both modes.  */
2845 rtx mem = assign_stack_temp (reg_mode,
2846 GET_MODE_SIZE (mode), 0);
2847 rtx cmem = change_address (mem, mode, NULL_RTX);
/* Using a stack temp makes this function uninlinable.  */
2850 = N_("function using short complex types cannot be inline");
2854 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2855 emit_move_insn_1 (cmem, y);
2856 return emit_move_insn_1 (sreg, mem);
2860 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2861 emit_move_insn_1 (mem, sreg);
2862 return emit_move_insn_1 (x, cmem);
2868 realpart_x = gen_realpart (submode, x);
2869 realpart_y = gen_realpart (submode, y);
2870 imagpart_x = gen_imagpart (submode, x);
2871 imagpart_y = gen_imagpart (submode, y);
2873 /* Show the output dies here. This is necessary for SUBREGs
2874 of pseudos since we cannot track their lifetimes correctly;
2875 hard regs shouldn't appear here except as return values.
2876 We never want to emit such a clobber after reload. */
2878 && ! (reload_in_progress || reload_completed)
2879 && (GET_CODE (realpart_x) == SUBREG
2880 || GET_CODE (imagpart_x) == SUBREG)
2882 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
/* Move each half with the submode's move pattern.  */
2885 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2886 (realpart_x, realpart_y));
2887 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2888 (imagpart_x, imagpart_y));
2891 return get_last_insn ();
2894 /* This will handle any multi-word mode that lacks a move_insn pattern.
2895 However, you will get better code if you define such patterns,
2896 even if they must turn into multiple assembler instructions. */
2897 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2903 #ifdef PUSH_ROUNDING
2905 /* If X is a push on the stack, do the push now and replace
2906 X with a reference to the stack pointer. */
2907 if (push_operand (x, GET_MODE (x)))
2909 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2910 x = change_address (x, VOIDmode, stack_pointer_rtx);
2914 /* If we are in reload, see if either operand is a MEM whose address
2915 is scheduled for replacement. */
2916 if (reload_in_progress && GET_CODE (x) == MEM
2917 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2919 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2921 MEM_COPY_ATTRIBUTES (new, x);
2924 if (reload_in_progress && GET_CODE (y) == MEM
2925 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2927 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2929 MEM_COPY_ATTRIBUTES (new, y);
/* Move one word at a time; any scalar wider than a word is a
   multiple of a word in size.  */
2937 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2940 rtx xpart = operand_subword (x, i, 1, mode);
2941 rtx ypart = operand_subword (y, i, 1, mode);
2943 /* If we can't get a part of Y, put Y into memory if it is a
2944 constant. Otherwise, force it into a register. If we still
2945 can't get a part of Y, abort. */
2946 if (ypart == 0 && CONSTANT_P (y))
2948 y = force_const_mem (mode, y);
2949 ypart = operand_subword (y, i, 1, mode);
2951 else if (ypart == 0)
2952 ypart = operand_subword_force (y, i, mode);
2954 if (xpart == 0 || ypart == 0)
2957 need_clobber |= (GET_CODE (xpart) == SUBREG);
2959 last_insn = emit_move_insn (xpart, ypart);
2962 seq = gen_sequence ();
2965 /* Show the output dies here. This is necessary for SUBREGs
2966 of pseudos since we cannot track their lifetimes correctly;
2967 hard regs shouldn't appear here except as return values.
2968 We never want to emit such a clobber after reload. */
2970 && ! (reload_in_progress || reload_completed)
2971 && need_clobber != 0)
2973 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2984 /* Pushing data onto the stack. */
2986 /* Push a block of length SIZE (perhaps variable)
2987 and return an rtx to address the beginning of the block.
2988 Note that it is not possible for the value returned to be a QUEUED.
2989 The value may be virtual_outgoing_args_rtx.
2991 EXTRA is the number of bytes of padding to push in addition to SIZE.
2992 BELOW nonzero means this padding comes at low addresses;
2993 otherwise, the padding comes at high addresses. */
/* NOTE(review): chunk is elided -- the return type, braces, the TEMP
   declaration and several conditional arms are not visible.  */
2996 push_block (size, extra, below)
/* Widen/narrow SIZE from ptr_mode to Pmode for address arithmetic.  */
3002 size = convert_modes (Pmode, ptr_mode, size, 1);
3003 if (CONSTANT_P (size))
3004 anti_adjust_stack (plus_constant (size, extra));
3005 else if (GET_CODE (size) == REG && extra == 0)
3006 anti_adjust_stack (size);
/* Variable size with padding: add EXTRA at run time, then adjust.  */
3009 temp = copy_to_mode_reg (Pmode, size);
3011 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3012 temp, 0, OPTAB_LIB_WIDEN);
3013 anti_adjust_stack (temp);
3016 #ifndef STACK_GROWS_DOWNWARD
3017 #ifdef ARGS_GROW_DOWNWARD
3018 if (!ACCUMULATE_OUTGOING_ARGS)
3026 /* Return the lowest stack address when STACK or ARGS grow downward and
3027 we are not accumulating outgoing arguments (the c4x port uses such
3029 temp = virtual_outgoing_args_rtx;
3030 if (extra != 0 && below)
3031 temp = plus_constant (temp, extra);
/* Downward-growing case: the block starts SIZE bytes below the
   outgoing-args pointer (adjusting for padding placement).  */
3035 if (GET_CODE (size) == CONST_INT)
3036 temp = plus_constant (virtual_outgoing_args_rtx,
3037 -INTVAL (size) - (below ? 0 : extra));
3038 else if (extra != 0 && !below)
3039 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3040 negate_rtx (Pmode, plus_constant (size, extra)));
3042 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3043 negate_rtx (Pmode, size));
/* Legitimize the computed address before handing it back.  */
3046 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): the line below is the (elided) body of gen_push_operand:
   it builds a PRE_DEC/PRE_INC/... expression on the stack pointer.  */
3052 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3055 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
3056 block of SIZE bytes. */
/* NOTE(review): elided -- return type, braces and TEMP declaration
   are not visible.  */
3059 get_push_address (size)
/* For post-modify push codes the stack pointer has already moved past
   the block, so compensate by SIZE in the opposite direction.  */
3064 if (STACK_PUSH_CODE == POST_DEC)
3065 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size))
3066 else if (STACK_PUSH_CODE == POST_INC)
3067 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3069 temp = stack_pointer_rtx;
/* Copy to a pseudo so the value survives later sp adjustments.  */
3071 return copy_to_reg (temp);
3074 /* Generate code to push X onto the stack, assuming it has mode MODE and
3076 MODE is redundant except when X is a CONST_INT (since they don't
3078 SIZE is an rtx for the size of data to be copied (in bytes),
3079 needed only if X is BLKmode.
3081 ALIGN is maximum alignment we can assume.
3083 If PARTIAL and REG are both nonzero, then copy that many of the first
3084 words of X into registers starting with REG, and push the rest of X.
3085 The amount of space pushed is decreased by PARTIAL words,
3086 rounded *down* to a multiple of PARM_BOUNDARY.
3087 REG must be a hard register in this case.
3088 If REG is zero but PARTIAL is not, take all other actions for an
3089 argument partially in registers, but do not actually load any
3092 EXTRA is the amount in bytes of extra space to leave next to this arg.
3093 This is ignored if an argument block has already been allocated.
3095 On a machine that lacks real push insns, ARGS_ADDR is the address of
3096 the bottom of the argument block for this call. We use indexing off there
3097 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3098 argument block has not been preallocated.
3100 ARGS_SO_FAR is the size of args previously pushed for this call.
3102 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3103 for arguments passed in registers. If nonzero, it will be the number
3104 of bytes required. */
/* NOTE(review): the chunk is heavily elided -- braces, many parameter
   and local declarations (xinner, skip, temp, addr, dest, i, not_stack)
   and several else-arms are missing; commentary covers visible lines.  */
3107 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3108 args_addr, args_so_far, reg_parm_stack_space,
3111 enum machine_mode mode;
3120 int reg_parm_stack_space;
3124 enum direction stack_direction
3125 #ifdef STACK_GROWS_DOWNWARD
3131 /* Decide where to pad the argument: `downward' for below,
3132 `upward' for above, or `none' for don't pad it.
3133 Default is below for small data on big-endian machines; else above. */
3134 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3136 /* Invert direction if stack is post-update. */
3137 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3138 if (where_pad != none)
3139 where_pad = (where_pad == downward ? upward : downward);
3141 xinner = x = protect_from_queue (x, 0);
3143 if (mode == BLKmode)
3145 /* Copy a block into the stack, entirely or partially. */
/* USED = bytes handled by registers; OFFSET = misalignment of that
   amount relative to a parameter boundary.  */
3148 int used = partial * UNITS_PER_WORD;
3149 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3157 /* USED is now the # of bytes we need not copy to the stack
3158 because registers will take care of them. */
3161 xinner = change_address (xinner, BLKmode,
3162 plus_constant (XEXP (xinner, 0), used));
3164 /* If the partial register-part of the arg counts in its stack size,
3165 skip the part of stack space corresponding to the registers.
3166 Otherwise, start copying to the beginning of the stack space,
3167 by setting SKIP to 0. */
3168 skip = (reg_parm_stack_space == 0) ? 0 : used;
3170 #ifdef PUSH_ROUNDING
3171 /* Do it with several push insns if that doesn't take lots of insns
3172 and if there is no difficulty with push insns that skip bytes
3173 on the stack for alignment purposes. */
3176 && GET_CODE (size) == CONST_INT
3178 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3179 /* Here we avoid the case of a structure whose weak alignment
3180 forces many pushes of a small amount of data,
3181 and such small pushes do rounding that causes trouble. */
3182 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3183 || align >= BIGGEST_ALIGNMENT
3184 || PUSH_ROUNDING (align) == align)
3185 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3187 /* Push padding now if padding above and stack grows down,
3188 or if padding below and stack grows up.
3189 But if space already allocated, this has already been done. */
3190 if (extra && args_addr == 0
3191 && where_pad != none && where_pad != stack_direction)
3192 anti_adjust_stack (GEN_INT (extra));
/* Account for the stack-pointer movement and do the pushes.  */
3194 stack_pointer_delta += INTVAL (size) - used;
3195 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3196 INTVAL (size) - used, align);
/* -fcheck-memory-usage instrumentation for the pushed block.  */
3198 if (current_function_check_memory_usage && ! in_check_memory_usage)
3202 in_check_memory_usage = 1;
3203 temp = get_push_address (INTVAL (size) - used);
3204 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3205 emit_library_call (chkr_copy_bitmap_libfunc,
3206 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3207 Pmode, XEXP (xinner, 0), Pmode,
3208 GEN_INT (INTVAL (size) - used),
3209 TYPE_MODE (sizetype));
3211 emit_library_call (chkr_set_right_libfunc,
3212 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3213 Pmode, GEN_INT (INTVAL (size) - used),
3214 TYPE_MODE (sizetype),
3215 GEN_INT (MEMORY_USE_RW),
3216 TYPE_MODE (integer_type_node));
3217 in_check_memory_usage = 0;
3221 #endif /* PUSH_ROUNDING */
3225 /* Otherwise make space on the stack and copy the data
3226 to the address of that space. */
3228 /* Deduct words put into registers from the size we must copy. */
3231 if (GET_CODE (size) == CONST_INT)
3232 size = GEN_INT (INTVAL (size) - used);
3234 size = expand_binop (GET_MODE (size), sub_optab, size,
3235 GEN_INT (used), NULL_RTX, 0,
3239 /* Get the address of the stack space.
3240 In this case, we do not deal with EXTRA separately.
3241 A single stack adjust will do. */
3244 temp = push_block (size, extra, where_pad == downward);
3247 else if (GET_CODE (args_so_far) == CONST_INT)
3248 temp = memory_address (BLKmode,
3249 plus_constant (args_addr,
3250 skip + INTVAL (args_so_far)));
3252 temp = memory_address (BLKmode,
3253 plus_constant (gen_rtx_PLUS (Pmode,
/* Memory-usage instrumentation for the copied block.  */
3257 if (current_function_check_memory_usage && ! in_check_memory_usage)
3259 in_check_memory_usage = 1;
3260 target = copy_to_reg (temp);
3261 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3262 emit_library_call (chkr_copy_bitmap_libfunc,
3263 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3265 XEXP (xinner, 0), Pmode,
3266 size, TYPE_MODE (sizetype));
3268 emit_library_call (chkr_set_right_libfunc,
3269 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3271 size, TYPE_MODE (sizetype),
3272 GEN_INT (MEMORY_USE_RW),
3273 TYPE_MODE (integer_type_node));
3274 in_check_memory_usage = 0;
3277 target = gen_rtx_MEM (BLKmode, temp);
3281 set_mem_attributes (target, type, 1);
3282 /* Function incoming arguments may overlap with sibling call
3283 outgoing arguments and we cannot allow reordering of reads
3284 from function arguments with stores to outgoing arguments
3285 of sibling calls. */
3286 MEM_ALIAS_SET (target) = 0;
3289 /* TEMP is the address of the block. Copy the data there. */
3290 if (GET_CODE (size) == CONST_INT
3291 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3293 move_by_pieces (target, xinner, INTVAL (size), align);
/* Otherwise probe the movstrM patterns, narrowest mode first.  */
3298 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3299 enum machine_mode mode;
3301 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3303 mode = GET_MODE_WIDER_MODE (mode))
3305 enum insn_code code = movstr_optab[(int) mode];
3306 insn_operand_predicate_fn pred;
3308 if (code != CODE_FOR_nothing
3309 && ((GET_CODE (size) == CONST_INT
3310 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3311 <= (GET_MODE_MASK (mode) >> 1)))
3312 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3313 && (!(pred = insn_data[(int) code].operand[0].predicate)
3314 || ((*pred) (target, BLKmode)))
3315 && (!(pred = insn_data[(int) code].operand[1].predicate)
3316 || ((*pred) (xinner, BLKmode)))
3317 && (!(pred = insn_data[(int) code].operand[3].predicate)
3318 || ((*pred) (opalign, VOIDmode))))
3320 rtx op2 = convert_to_mode (mode, size, 1);
3321 rtx last = get_last_insn ();
/* Length operand must satisfy the pattern's predicate.  */
3324 pred = insn_data[(int) code].operand[2].predicate;
3325 if (pred != 0 && ! (*pred) (op2, mode))
3326 op2 = copy_to_mode_reg (mode, op2);
3328 pat = GEN_FCN ((int) code) (target, xinner,
/* Pattern FAILed; discard the partial expansion.  */
3336 delete_insns_since (last);
3341 if (!ACCUMULATE_OUTGOING_ARGS)
3343 /* If the source is referenced relative to the stack pointer,
3344 copy it to another register to stabilize it. We do not need
3345 to do this if we know that we won't be changing sp. */
3347 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3348 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3349 temp = copy_to_reg (temp);
3352 /* Make inhibit_defer_pop nonzero around the library call
3353 to force it to pop the bcopy-arguments right away. */
3355 #ifdef TARGET_MEM_FUNCTIONS
3356 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3357 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3358 convert_to_mode (TYPE_MODE (sizetype),
3359 size, TREE_UNSIGNED (sizetype)),
3360 TYPE_MODE (sizetype));
/* No memcpy: fall back to bcopy (note swapped src/dst order).  */
3362 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3363 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3364 convert_to_mode (TYPE_MODE (integer_type_node),
3366 TREE_UNSIGNED (integer_type_node)),
3367 TYPE_MODE (integer_type_node));
3372 else if (partial > 0)
3374 /* Scalar partly in registers. */
3376 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3379 /* # words of start of argument
3380 that we must make space for but need not store. */
3381 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3382 int args_offset = INTVAL (args_so_far);
3385 /* Push padding now if padding above and stack grows down,
3386 or if padding below and stack grows up.
3387 But if space already allocated, this has already been done. */
3388 if (extra && args_addr == 0
3389 && where_pad != none && where_pad != stack_direction)
3390 anti_adjust_stack (GEN_INT (extra));
3392 /* If we make space by pushing it, we might as well push
3393 the real data. Otherwise, we can leave OFFSET nonzero
3394 and leave the space uninitialized. */
3398 /* Now NOT_STACK gets the number of words that we don't need to
3399 allocate on the stack. */
3400 not_stack = partial - offset;
3402 /* If the partial register-part of the arg counts in its stack size,
3403 skip the part of stack space corresponding to the registers.
3404 Otherwise, start copying to the beginning of the stack space,
3405 by setting SKIP to 0. */
3406 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3408 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3409 x = validize_mem (force_const_mem (mode, x));
3411 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3412 SUBREGs of such registers are not allowed. */
3413 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3414 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3415 x = copy_to_reg (x);
3417 /* Loop over all the words allocated on the stack for this arg. */
3418 /* We can do it by words, because any scalar bigger than a word
3419 has a size a multiple of a word. */
3420 #ifndef PUSH_ARGS_REVERSED
3421 for (i = not_stack; i < size; i++)
3423 for (i = size - 1; i >= not_stack; i--)
3425 if (i >= not_stack + offset)
/* Recurse: push each stack-resident word individually.  */
3426 emit_push_insn (operand_subword_force (x, i, mode),
3427 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3429 GEN_INT (args_offset + ((i - not_stack + skip)
3431 reg_parm_stack_space, alignment_pad);
/* Final case: a plain scalar pushed (or stored) whole.  */
3436 rtx target = NULL_RTX;
3439 /* Push padding now if padding above and stack grows down,
3440 or if padding below and stack grows up.
3441 But if space already allocated, this has already been done. */
3442 if (extra && args_addr == 0
3443 && where_pad != none && where_pad != stack_direction)
3444 anti_adjust_stack (GEN_INT (extra));
3446 #ifdef PUSH_ROUNDING
3447 if (args_addr == 0 && PUSH_ARGS)
3449 addr = gen_push_operand ();
3450 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* No push insn: compute the destination address explicitly.  */
3455 if (GET_CODE (args_so_far) == CONST_INT)
3457 = memory_address (mode,
3458 plus_constant (args_addr,
3459 INTVAL (args_so_far)));
3461 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3466 dest = gen_rtx_MEM (mode, addr);
3469 set_mem_attributes (dest, type, 1);
3470 /* Function incoming arguments may overlap with sibling call
3471 outgoing arguments and we cannot allow reordering of reads
3472 from function arguments with stores to outgoing arguments
3473 of sibling calls. */
3474 MEM_ALIAS_SET (dest) = 0;
3477 emit_move_insn (dest, x);
/* Memory-usage instrumentation for the single-value push.  */
3479 if (current_function_check_memory_usage && ! in_check_memory_usage)
3481 in_check_memory_usage = 1;
3483 target = get_push_address (GET_MODE_SIZE (mode));
3485 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3486 emit_library_call (chkr_copy_bitmap_libfunc,
3487 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3488 Pmode, XEXP (x, 0), Pmode,
3489 GEN_INT (GET_MODE_SIZE (mode)),
3490 TYPE_MODE (sizetype));
3492 emit_library_call (chkr_set_right_libfunc,
3493 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3494 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3495 TYPE_MODE (sizetype),
3496 GEN_INT (MEMORY_USE_RW),
3497 TYPE_MODE (integer_type_node));
3498 in_check_memory_usage = 0;
3503 /* If part should go in registers, copy that part
3504 into the appropriate registers. Do this now, at the end,
3505 since mem-to-mem copies above may do function calls. */
3506 if (partial > 0 && reg != 0)
3508 /* Handle calls that pass values in multiple non-contiguous locations.
3509 The Irix 6 ABI has examples of this. */
3510 if (GET_CODE (reg) == PARALLEL)
3511 emit_group_load (reg, x, -1, align); /* ??? size? */
3513 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding on the same side as stack growth is applied last.  */
3516 if (extra && args_addr == 0 && where_pad == stack_direction)
3517 anti_adjust_stack (GEN_INT (extra));
3519 if (alignment_pad && args_addr == 0)
3520 anti_adjust_stack (alignment_pad);
3523 /* Return X if X can be used as a subtarget in a sequence of arithmetic
/* NOTE(review): elided fragment -- the function header and the start of
   the disjunction (presumably `return (x == 0 || ...`) are missing; the
   visible lines are the disqualifying conditions of that test.  */
3531 /* Only registers can be subtargets. */
3532 || GET_CODE (x) != REG
3533 /* If the register is readonly, it can't be set more than once. */
3534 || RTX_UNCHANGING_P (x)
3535 /* Don't use hard regs to avoid extending their life. */
3536 || REGNO (x) < FIRST_PSEUDO_REGISTER
3537 /* Avoid subtargets inside loops,
3538 since they hide some invariant expressions. */
3539 || preserve_subexpressions_p ())
3543 /* Expand an assignment that stores the value of FROM into TO.
3544 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3545 (This may contain a QUEUED rtx;
3546 if the value is constant, this rtx is a constant.)
3547 Otherwise, the returned value is NULL_RTX.
3549 SUGGEST_REG is no longer actually used.
3550 It used to mean, copy the value through a register
3551 and return that register, if that is possible.
3552 We now use WANT_VALUE to decide whether to do this. */
3555 expand_assignment (to, from, want_value, suggest_reg)
3558 int suggest_reg ATTRIBUTE_UNUSED;
3560 register rtx to_rtx = 0;
3563 /* Don't crash if the lhs of the assignment was erroneous. */
3565 if (TREE_CODE (to) == ERROR_MARK)
3567 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3568 return want_value ? result : NULL_RTX;
3571 /* Assignment of a structure component needs special treatment
3572 if the structure component's rtx is not simply a MEM.
3573 Assignment of an array element at a constant index, and assignment of
3574 an array element in an unaligned packed structure field, has the same
3577 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3578 || TREE_CODE (to) == ARRAY_REF)
3580 enum machine_mode mode1;
3581 HOST_WIDE_INT bitsize, bitpos;
3586 unsigned int alignment;
3589 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3590 &unsignedp, &volatilep, &alignment);
3592 /* If we are going to use store_bit_field and extract_bit_field,
3593 make sure to_rtx will be safe for multiple use. */
3595 if (mode1 == VOIDmode && want_value)
3596 tem = stabilize_reference (tem);
3598 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3601 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3603 if (GET_CODE (to_rtx) != MEM)
3606 if (GET_MODE (offset_rtx) != ptr_mode)
3608 #ifdef POINTERS_EXTEND_UNSIGNED
3609 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3611 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3615 /* A constant address in TO_RTX can have VOIDmode, we must not try
3616 to call force_reg for that case. Avoid that case. */
3617 if (GET_CODE (to_rtx) == MEM
3618 && GET_MODE (to_rtx) == BLKmode
3619 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3621 && (bitpos % bitsize) == 0
3622 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3623 && alignment == GET_MODE_ALIGNMENT (mode1))
3625 rtx temp = change_address (to_rtx, mode1,
3626 plus_constant (XEXP (to_rtx, 0),
3629 if (GET_CODE (XEXP (temp, 0)) == REG)
3632 to_rtx = change_address (to_rtx, mode1,
3633 force_reg (GET_MODE (XEXP (temp, 0)),
3638 to_rtx = change_address (to_rtx, VOIDmode,
3639 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3640 force_reg (ptr_mode,
3646 if (GET_CODE (to_rtx) == MEM)
3648 /* When the offset is zero, to_rtx is the address of the
3649 structure we are storing into, and hence may be shared.
3650 We must make a new MEM before setting the volatile bit. */
3652 to_rtx = copy_rtx (to_rtx);
3654 MEM_VOLATILE_P (to_rtx) = 1;
3656 #if 0 /* This was turned off because, when a field is volatile
3657 in an object which is not volatile, the object may be in a register,
3658 and then we would abort over here. */
3664 if (TREE_CODE (to) == COMPONENT_REF
3665 && TREE_READONLY (TREE_OPERAND (to, 1)))
3668 to_rtx = copy_rtx (to_rtx);
3670 RTX_UNCHANGING_P (to_rtx) = 1;
3673 /* Check the access. */
3674 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3679 enum machine_mode best_mode;
3681 best_mode = get_best_mode (bitsize, bitpos,
3682 TYPE_ALIGN (TREE_TYPE (tem)),
3684 if (best_mode == VOIDmode)
3687 best_mode_size = GET_MODE_BITSIZE (best_mode);
3688 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3689 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3690 size *= GET_MODE_SIZE (best_mode);
3692 /* Check the access right of the pointer. */
3693 in_check_memory_usage = 1;
3695 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3696 VOIDmode, 3, to_addr, Pmode,
3697 GEN_INT (size), TYPE_MODE (sizetype),
3698 GEN_INT (MEMORY_USE_WO),
3699 TYPE_MODE (integer_type_node));
3700 in_check_memory_usage = 0;
3703 /* If this is a varying-length object, we must get the address of
3704 the source and do an explicit block move. */
3707 unsigned int from_align;
3708 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3710 = change_address (to_rtx, VOIDmode,
3711 plus_constant (XEXP (to_rtx, 0),
3712 bitpos / BITS_PER_UNIT));
3714 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3715 MIN (alignment, from_align));
3722 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3724 /* Spurious cast for HPUX compiler. */
3725 ? ((enum machine_mode)
3726 TYPE_MODE (TREE_TYPE (to)))
3730 int_size_in_bytes (TREE_TYPE (tem)),
3731 get_alias_set (to));
3733 preserve_temp_slots (result);
3737 /* If the value is meaningful, convert RESULT to the proper mode.
3738 Otherwise, return nothing. */
3739 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3740 TYPE_MODE (TREE_TYPE (from)),
3742 TREE_UNSIGNED (TREE_TYPE (to)))
3747 /* If the rhs is a function call and its value is not an aggregate,
3748 call the function before we start to compute the lhs.
3749 This is needed for correct code for cases such as
3750 val = setjmp (buf) on machines where reference to val
3751 requires loading up part of an address in a separate insn.
3753 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3754 since it might be a promoted variable where the zero- or sign- extension
3755 needs to be done. Handling this in the normal way is safe because no
3756 computation is done before the call. */
3757 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3758 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3759 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3760 && GET_CODE (DECL_RTL (to)) == REG))
3765 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3767 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3769 /* Handle calls that return values in multiple non-contiguous locations.
3770 The Irix 6 ABI has examples of this. */
3771 if (GET_CODE (to_rtx) == PARALLEL)
3772 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3773 TYPE_ALIGN (TREE_TYPE (from)));
3774 else if (GET_MODE (to_rtx) == BLKmode)
3775 emit_block_move (to_rtx, value, expr_size (from),
3776 TYPE_ALIGN (TREE_TYPE (from)));
3779 #ifdef POINTERS_EXTEND_UNSIGNED
3780 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3781 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3782 value = convert_memory_address (GET_MODE (to_rtx), value);
3784 emit_move_insn (to_rtx, value);
3786 preserve_temp_slots (to_rtx);
3789 return want_value ? to_rtx : NULL_RTX;
3792 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3793 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3797 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3798 if (GET_CODE (to_rtx) == MEM)
3799 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3802 /* Don't move directly into a return register. */
3803 if (TREE_CODE (to) == RESULT_DECL
3804 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3809 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3811 if (GET_CODE (to_rtx) == PARALLEL)
3812 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3813 TYPE_ALIGN (TREE_TYPE (from)));
3815 emit_move_insn (to_rtx, temp);
3817 preserve_temp_slots (to_rtx);
3820 return want_value ? to_rtx : NULL_RTX;
3823 /* In case we are returning the contents of an object which overlaps
3824 the place the value is being stored, use a safe function when copying
3825 a value through a pointer into a structure value return block. */
3826 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3827 && current_function_returns_struct
3828 && !current_function_returns_pcc_struct)
3833 size = expr_size (from);
3834 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3835 EXPAND_MEMORY_USE_DONT);
3837 /* Copy the rights of the bitmap. */
3838 if (current_function_check_memory_usage)
3839 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3840 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3841 XEXP (from_rtx, 0), Pmode,
3842 convert_to_mode (TYPE_MODE (sizetype),
3843 size, TREE_UNSIGNED (sizetype)),
3844 TYPE_MODE (sizetype));
3846 #ifdef TARGET_MEM_FUNCTIONS
3847 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3849 XEXP (from_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (sizetype),
3851 size, TREE_UNSIGNED (sizetype)),
3852 TYPE_MODE (sizetype));
3854 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3855 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3856 XEXP (to_rtx, 0), Pmode,
3857 convert_to_mode (TYPE_MODE (integer_type_node),
3858 size, TREE_UNSIGNED (integer_type_node)),
3859 TYPE_MODE (integer_type_node));
3862 preserve_temp_slots (to_rtx);
3865 return want_value ? to_rtx : NULL_RTX;
3868 /* Compute FROM and store the value in the rtx we got. */
3871 result = store_expr (from, to_rtx, want_value);
3872 preserve_temp_slots (result);
3875 return want_value ? result : NULL_RTX;
3878 /* Generate code for computing expression EXP,
3879 and storing the value into TARGET.
3880 TARGET may contain a QUEUED rtx.
3882 If WANT_VALUE is nonzero, return a copy of the value
3883 not in TARGET, so that we can be sure to use the proper
3884 value in a containing expression even if TARGET has something
3885 else stored in it. If possible, we copy the value through a pseudo
3886 and return that pseudo. Or, if the value is constant, we try to
3887 return the constant. In some cases, we return a pseudo
3888 copied *from* TARGET.
3890 If the mode is BLKmode then we may return TARGET itself.
3891 It turns out that in BLKmode it doesn't cause a problem,
3892 because C has no operators that could combine two different
3893 assignments into the same BLKmode object with different values
3894 with no sequence point. Will other languages need this to
3897 If WANT_VALUE is 0, we return NULL, to make sure
3898 to catch quickly any cases where the caller uses the value
3899 and fails to set WANT_VALUE. */
3902 store_expr (exp, target, want_value)
3904 register rtx target;
3908 int dont_return_target = 0;
3910 if (TREE_CODE (exp) == COMPOUND_EXPR)
3912 /* Perform first part of compound expression, then assign from second
3914 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3916 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3918 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3920 /* For conditional expression, get safe form of the target. Then
3921 test the condition, doing the appropriate assignment on either
3922 side. This avoids the creation of unnecessary temporaries.
3923 For non-BLKmode, it is more efficient not to do this. */
3925 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3928 target = protect_from_queue (target, 1);
3930 do_pending_stack_adjust ();
3932 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3933 start_cleanup_deferral ();
3934 store_expr (TREE_OPERAND (exp, 1), target, 0);
3935 end_cleanup_deferral ();
3937 emit_jump_insn (gen_jump (lab2));
3940 start_cleanup_deferral ();
3941 store_expr (TREE_OPERAND (exp, 2), target, 0);
3942 end_cleanup_deferral ();
3947 return want_value ? target : NULL_RTX;
3949 else if (queued_subexp_p (target))
3950 /* If target contains a postincrement, let's not risk
3951 using it as the place to generate the rhs. */
3953 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3955 /* Expand EXP into a new pseudo. */
3956 temp = gen_reg_rtx (GET_MODE (target));
3957 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3960 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3962 /* If target is volatile, ANSI requires accessing the value
3963 *from* the target, if it is accessed. So make that happen.
3964 In no case return the target itself. */
3965 if (! MEM_VOLATILE_P (target) && want_value)
3966 dont_return_target = 1;
3968 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3969 && GET_MODE (target) != BLKmode)
3970 /* If target is in memory and caller wants value in a register instead,
3971 arrange that. Pass TARGET as target for expand_expr so that,
3972 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3973 We know expand_expr will not use the target in that case.
3974 Don't do this if TARGET is volatile because we are supposed
3975 to write it and then read it. */
3977 temp = expand_expr (exp, target, GET_MODE (target), 0);
3978 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3979 temp = copy_to_reg (temp);
3980 dont_return_target = 1;
3982 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3983 /* If this is an scalar in a register that is stored in a wider mode
3984 than the declared mode, compute the result into its declared mode
3985 and then convert to the wider mode. Our value is the computed
3988 /* If we don't want a value, we can do the conversion inside EXP,
3989 which will often result in some optimizations. Do the conversion
3990 in two steps: first change the signedness, if needed, then
3991 the extend. But don't do this if the type of EXP is a subtype
3992 of something else since then the conversion might involve
3993 more than just converting modes. */
3994 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3995 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3997 if (TREE_UNSIGNED (TREE_TYPE (exp))
3998 != SUBREG_PROMOTED_UNSIGNED_P (target))
4001 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4005 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4006 SUBREG_PROMOTED_UNSIGNED_P (target)),
4010 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4012 /* If TEMP is a volatile MEM and we want a result value, make
4013 the access now so it gets done only once. Likewise if
4014 it contains TARGET. */
4015 if (GET_CODE (temp) == MEM && want_value
4016 && (MEM_VOLATILE_P (temp)
4017 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4018 temp = copy_to_reg (temp);
4020 /* If TEMP is a VOIDmode constant, use convert_modes to make
4021 sure that we properly convert it. */
4022 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4023 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4024 TYPE_MODE (TREE_TYPE (exp)), temp,
4025 SUBREG_PROMOTED_UNSIGNED_P (target));
4027 convert_move (SUBREG_REG (target), temp,
4028 SUBREG_PROMOTED_UNSIGNED_P (target));
4030 /* If we promoted a constant, change the mode back down to match
4031 target. Otherwise, the caller might get confused by a result whose
4032 mode is larger than expected. */
4034 if (want_value && GET_MODE (temp) != GET_MODE (target)
4035 && GET_MODE (temp) != VOIDmode)
4037 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4038 SUBREG_PROMOTED_VAR_P (temp) = 1;
4039 SUBREG_PROMOTED_UNSIGNED_P (temp)
4040 = SUBREG_PROMOTED_UNSIGNED_P (target);
4043 return want_value ? temp : NULL_RTX;
4047 temp = expand_expr (exp, target, GET_MODE (target), 0);
4048 /* Return TARGET if it's a specified hardware register.
4049 If TARGET is a volatile mem ref, either return TARGET
4050 or return a reg copied *from* TARGET; ANSI requires this.
4052 Otherwise, if TEMP is not TARGET, return TEMP
4053 if it is constant (for efficiency),
4054 or if we really want the correct value. */
4055 if (!(target && GET_CODE (target) == REG
4056 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4057 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4058 && ! rtx_equal_p (temp, target)
4059 && (CONSTANT_P (temp) || want_value))
4060 dont_return_target = 1;
4063 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4064 the same as that of TARGET, adjust the constant. This is needed, for
4065 example, in case it is a CONST_DOUBLE and we want only a word-sized
4067 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4068 && TREE_CODE (exp) != ERROR_MARK
4069 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4070 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4071 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4073 if (current_function_check_memory_usage
4074 && GET_CODE (target) == MEM
4075 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4077 in_check_memory_usage = 1;
4078 if (GET_CODE (temp) == MEM)
4079 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4080 VOIDmode, 3, XEXP (target, 0), Pmode,
4081 XEXP (temp, 0), Pmode,
4082 expr_size (exp), TYPE_MODE (sizetype));
4084 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4085 VOIDmode, 3, XEXP (target, 0), Pmode,
4086 expr_size (exp), TYPE_MODE (sizetype),
4087 GEN_INT (MEMORY_USE_WO),
4088 TYPE_MODE (integer_type_node));
4089 in_check_memory_usage = 0;
4092 /* If value was not generated in the target, store it there.
4093 Convert the value to TARGET's type first if nec. */
4094 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4095 one or both of them are volatile memory refs, we have to distinguish
4097 - expand_expr has used TARGET. In this case, we must not generate
4098 another copy. This can be detected by TARGET being equal according
4100 - expand_expr has not used TARGET - that means that the source just
4101 happens to have the same RTX form. Since temp will have been created
4102 by expand_expr, it will compare unequal according to == .
4103 We must generate a copy in this case, to reach the correct number
4104 of volatile memory references. */
4106 if ((! rtx_equal_p (temp, target)
4107 || (temp != target && (side_effects_p (temp)
4108 || side_effects_p (target))))
4109 && TREE_CODE (exp) != ERROR_MARK)
4111 target = protect_from_queue (target, 1);
4112 if (GET_MODE (temp) != GET_MODE (target)
4113 && GET_MODE (temp) != VOIDmode)
4115 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4116 if (dont_return_target)
4118 /* In this case, we will return TEMP,
4119 so make sure it has the proper mode.
4120 But don't forget to store the value into TARGET. */
4121 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4122 emit_move_insn (target, temp);
4125 convert_move (target, temp, unsignedp);
4128 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4130 /* Handle copying a string constant into an array.
4131 The string constant may be shorter than the array.
4132 So copy just the string's actual length, and clear the rest. */
4136 /* Get the size of the data type of the string,
4137 which is actually the size of the target. */
4138 size = expr_size (exp);
4139 if (GET_CODE (size) == CONST_INT
4140 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4141 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4144 /* Compute the size of the data to copy from the string. */
4146 = size_binop (MIN_EXPR,
4147 make_tree (sizetype, size),
4148 size_int (TREE_STRING_LENGTH (exp)));
4149 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4150 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4154 /* Copy that much. */
4155 emit_block_move (target, temp, copy_size_rtx,
4156 TYPE_ALIGN (TREE_TYPE (exp)));
4158 /* Figure out how much is left in TARGET that we have to clear.
4159 Do all calculations in ptr_mode. */
4161 addr = XEXP (target, 0);
4162 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4164 if (GET_CODE (copy_size_rtx) == CONST_INT)
4166 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4167 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4169 (unsigned int) (BITS_PER_UNIT
4170 * (INTVAL (copy_size_rtx)
4171 & - INTVAL (copy_size_rtx))));
4175 addr = force_reg (ptr_mode, addr);
4176 addr = expand_binop (ptr_mode, add_optab, addr,
4177 copy_size_rtx, NULL_RTX, 0,
4180 size = expand_binop (ptr_mode, sub_optab, size,
4181 copy_size_rtx, NULL_RTX, 0,
4184 align = BITS_PER_UNIT;
4185 label = gen_label_rtx ();
4186 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4187 GET_MODE (size), 0, 0, label);
4189 align = MIN (align, expr_align (copy_size));
4191 if (size != const0_rtx)
4193 rtx dest = gen_rtx_MEM (BLKmode, addr);
4195 MEM_COPY_ATTRIBUTES (dest, target);
4197 /* Be sure we can write on ADDR. */
4198 in_check_memory_usage = 1;
4199 if (current_function_check_memory_usage)
4200 emit_library_call (chkr_check_addr_libfunc,
4201 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4203 size, TYPE_MODE (sizetype),
4204 GEN_INT (MEMORY_USE_WO),
4205 TYPE_MODE (integer_type_node));
4206 in_check_memory_usage = 0;
4207 clear_storage (dest, size, align);
4214 /* Handle calls that return values in multiple non-contiguous locations.
4215 The Irix 6 ABI has examples of this. */
4216 else if (GET_CODE (target) == PARALLEL)
4217 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4218 TYPE_ALIGN (TREE_TYPE (exp)));
4219 else if (GET_MODE (temp) == BLKmode)
4220 emit_block_move (target, temp, expr_size (exp),
4221 TYPE_ALIGN (TREE_TYPE (exp)));
4223 emit_move_insn (target, temp);
4226 /* If we don't want a value, return NULL_RTX. */
4230 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4231 ??? The latter test doesn't seem to make sense. */
4232 else if (dont_return_target && GET_CODE (temp) != MEM)
4235 /* Return TARGET itself if it is a hard register. */
4236 else if (want_value && GET_MODE (target) != BLKmode
4237 && ! (GET_CODE (target) == REG
4238 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4239 return copy_to_reg (target);
4245 /* Return 1 if EXP just contains zeros. */
4253 switch (TREE_CODE (exp))
4257 case NON_LVALUE_EXPR:
4258 return is_zeros_p (TREE_OPERAND (exp, 0));
4261 return integer_zerop (exp);
4265 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4268 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4271 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4272 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4273 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4274 if (! is_zeros_p (TREE_VALUE (elt)))
4284 /* Return 1 if EXP contains mostly (3/4) zeros. */
4287 mostly_zeros_p (exp)
4290 if (TREE_CODE (exp) == CONSTRUCTOR)
4292 int elts = 0, zeros = 0;
4293 tree elt = CONSTRUCTOR_ELTS (exp);
4294 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4296 /* If there are no ranges of true bits, it is all zero. */
4297 return elt == NULL_TREE;
4299 for (; elt; elt = TREE_CHAIN (elt))
4301 /* We do not handle the case where the index is a RANGE_EXPR,
4302 so the statistic will be somewhat inaccurate.
4303 We do make a more accurate count in store_constructor itself,
4304 so since this function is only used for nested array elements,
4305 this should be close enough. */
4306 if (mostly_zeros_p (TREE_VALUE (elt)))
4311 return 4 * zeros >= 3 * elts;
4314 return is_zeros_p (exp);
4317 /* Helper function for store_constructor.
4318 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4319 TYPE is the type of the CONSTRUCTOR, not the element type.
4320 ALIGN and CLEARED are as for store_constructor.
4321 ALIAS_SET is the alias set to use for any stores.
4323 This provides a recursive shortcut back to store_constructor when it isn't
4324 necessary to go through store_field. This is so that we can pass through
4325 the cleared field to let store_constructor know that we may not have to
4326 clear a substructure if the outer structure has already been cleared. */
4329 store_constructor_field (target, bitsize, bitpos,
4330 mode, exp, type, align, cleared, alias_set)
4332 unsigned HOST_WIDE_INT bitsize;
4333 HOST_WIDE_INT bitpos;
4334 enum machine_mode mode;
4340 if (TREE_CODE (exp) == CONSTRUCTOR
4341 && bitpos % BITS_PER_UNIT == 0
4342 /* If we have a non-zero bitpos for a register target, then we just
4343 let store_field do the bitfield handling. This is unlikely to
4344 generate unnecessary clear instructions anyways. */
4345 && (bitpos == 0 || GET_CODE (target) == MEM))
4349 = change_address (target,
4350 GET_MODE (target) == BLKmode
4352 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4353 ? BLKmode : VOIDmode,
4354 plus_constant (XEXP (target, 0),
4355 bitpos / BITS_PER_UNIT));
4358 /* Show the alignment may no longer be what it was and update the alias
4359 set, if required. */
4361 align = MIN (align, (unsigned int) bitpos & - bitpos);
4362 if (GET_CODE (target) == MEM)
4363 MEM_ALIAS_SET (target) = alias_set;
4365 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4368 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4369 int_size_in_bytes (type), alias_set);
4372 /* Store the value of constructor EXP into the rtx TARGET.
4373 TARGET is either a REG or a MEM.
4374 ALIGN is the maximum known alignment for TARGET.
4375 CLEARED is true if TARGET is known to have been zero'd.
4376 SIZE is the number of bytes of TARGET we are allowed to modify: this
4377 may not be the same as the size of EXP if we are assigning to a field
4378 which has been packed to exclude padding bits. */
4381 store_constructor (exp, target, align, cleared, size)
4388 tree type = TREE_TYPE (exp);
4389 #ifdef WORD_REGISTER_OPERATIONS
4390 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4393 /* We know our target cannot conflict, since safe_from_p has been called. */
4395 /* Don't try copying piece by piece into a hard register
4396 since that is vulnerable to being clobbered by EXP.
4397 Instead, construct in a pseudo register and then copy it all. */
4398 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4400 rtx temp = gen_reg_rtx (GET_MODE (target));
4401 store_constructor (exp, temp, align, cleared, size);
4402 emit_move_insn (target, temp);
4407 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4408 || TREE_CODE (type) == QUAL_UNION_TYPE)
4412 /* Inform later passes that the whole union value is dead. */
4413 if ((TREE_CODE (type) == UNION_TYPE
4414 || TREE_CODE (type) == QUAL_UNION_TYPE)
4417 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4419 /* If the constructor is empty, clear the union. */
4420 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4421 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4424 /* If we are building a static constructor into a register,
4425 set the initial value as zero so we can fold the value into
4426 a constant. But if more than one register is involved,
4427 this probably loses. */
4428 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4429 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4432 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4437 /* If the constructor has fewer fields than the structure
4438 or if we are initializing the structure to mostly zeros,
4439 clear the whole structure first. Don't do this if TARGET is a
4440 register whose mode size isn't equal to SIZE since clear_storage
4441 can't handle this case. */
4443 && ((list_length (CONSTRUCTOR_ELTS (exp))
4444 != fields_length (type))
4445 || mostly_zeros_p (exp))
4446 && (GET_CODE (target) != REG
4447 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4450 clear_storage (target, GEN_INT (size), align);
4455 /* Inform later passes that the old value is dead. */
4456 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4458 /* Store each element of the constructor into
4459 the corresponding field of TARGET. */
4461 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4463 register tree field = TREE_PURPOSE (elt);
4464 #ifdef WORD_REGISTER_OPERATIONS
4465 tree value = TREE_VALUE (elt);
4467 register enum machine_mode mode;
4468 HOST_WIDE_INT bitsize;
4469 HOST_WIDE_INT bitpos = 0;
4472 rtx to_rtx = target;
4474 /* Just ignore missing fields.
4475 We cleared the whole structure, above,
4476 if any fields are missing. */
4480 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4483 if (host_integerp (DECL_SIZE (field), 1))
4484 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4488 unsignedp = TREE_UNSIGNED (field);
4489 mode = DECL_MODE (field);
4490 if (DECL_BIT_FIELD (field))
4493 offset = DECL_FIELD_OFFSET (field);
4494 if (host_integerp (offset, 0)
4495 && host_integerp (bit_position (field), 0))
4497 bitpos = int_bit_position (field);
4501 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4507 if (contains_placeholder_p (offset))
4508 offset = build (WITH_RECORD_EXPR, sizetype,
4509 offset, make_tree (TREE_TYPE (exp), target));
4511 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4512 if (GET_CODE (to_rtx) != MEM)
4515 if (GET_MODE (offset_rtx) != ptr_mode)
4517 #ifdef POINTERS_EXTEND_UNSIGNED
4518 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4520 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4525 = change_address (to_rtx, VOIDmode,
4526 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4527 force_reg (ptr_mode,
4529 align = DECL_OFFSET_ALIGN (field);
4532 if (TREE_READONLY (field))
4534 if (GET_CODE (to_rtx) == MEM)
4535 to_rtx = copy_rtx (to_rtx);
4537 RTX_UNCHANGING_P (to_rtx) = 1;
4540 #ifdef WORD_REGISTER_OPERATIONS
4541 /* If this initializes a field that is smaller than a word, at the
4542 start of a word, try to widen it to a full word.
4543 This special case allows us to output C++ member function
4544 initializations in a form that the optimizers can understand. */
4545 if (GET_CODE (target) == REG
4546 && bitsize < BITS_PER_WORD
4547 && bitpos % BITS_PER_WORD == 0
4548 && GET_MODE_CLASS (mode) == MODE_INT
4549 && TREE_CODE (value) == INTEGER_CST
4551 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4553 tree type = TREE_TYPE (value);
4554 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4556 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4557 value = convert (type, value);
4559 if (BYTES_BIG_ENDIAN)
4561 = fold (build (LSHIFT_EXPR, type, value,
4562 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4563 bitsize = BITS_PER_WORD;
4567 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4568 TREE_VALUE (elt), type, align, cleared,
4569 (DECL_NONADDRESSABLE_P (field)
4570 && GET_CODE (to_rtx) == MEM)
4571 ? MEM_ALIAS_SET (to_rtx)
4572 : get_alias_set (TREE_TYPE (field)));
4575 else if (TREE_CODE (type) == ARRAY_TYPE)
4580 tree domain = TYPE_DOMAIN (type);
4581 tree elttype = TREE_TYPE (type);
4582 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4583 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4584 HOST_WIDE_INT minelt;
4585 HOST_WIDE_INT maxelt;
4587 /* If we have constant bounds for the range of the type, get them. */
4590 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4591 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4594 /* If the constructor has fewer elements than the array,
4595 clear the whole array first. Similarly if this is
4596 static constructor of a non-BLKmode object. */
4597 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4601 HOST_WIDE_INT count = 0, zero_count = 0;
4602 need_to_clear = ! const_bounds_p;
4604 /* This loop is a more accurate version of the loop in
4605 mostly_zeros_p (it handles RANGE_EXPR in an index).
4606 It is also needed to check for missing elements. */
4607 for (elt = CONSTRUCTOR_ELTS (exp);
4608 elt != NULL_TREE && ! need_to_clear;
4609 elt = TREE_CHAIN (elt))
4611 tree index = TREE_PURPOSE (elt);
4612 HOST_WIDE_INT this_node_count;
4614 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4616 tree lo_index = TREE_OPERAND (index, 0);
4617 tree hi_index = TREE_OPERAND (index, 1);
4619 if (! host_integerp (lo_index, 1)
4620 || ! host_integerp (hi_index, 1))
4626 this_node_count = (tree_low_cst (hi_index, 1)
4627 - tree_low_cst (lo_index, 1) + 1);
4630 this_node_count = 1;
4632 count += this_node_count;
4633 if (mostly_zeros_p (TREE_VALUE (elt)))
4634 zero_count += this_node_count;
4637 /* Clear the entire array first if there are any missing elements,
4638 or if the incidence of zero elements is >= 75%. */
4640 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4644 if (need_to_clear && size > 0)
4647 clear_storage (target, GEN_INT (size), align);
4651 /* Inform later passes that the old value is dead. */
4652 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4654 /* Store each element of the constructor into
4655 the corresponding element of TARGET, determined
4656 by counting the elements. */
4657 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4659 elt = TREE_CHAIN (elt), i++)
4661 register enum machine_mode mode;
4662 HOST_WIDE_INT bitsize;
4663 HOST_WIDE_INT bitpos;
4665 tree value = TREE_VALUE (elt);
4666 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4667 tree index = TREE_PURPOSE (elt);
4668 rtx xtarget = target;
4670 if (cleared && is_zeros_p (value))
4673 unsignedp = TREE_UNSIGNED (elttype);
4674 mode = TYPE_MODE (elttype);
4675 if (mode == BLKmode)
4676 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4677 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4680 bitsize = GET_MODE_BITSIZE (mode);
4682 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4684 tree lo_index = TREE_OPERAND (index, 0);
4685 tree hi_index = TREE_OPERAND (index, 1);
4686 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4687 struct nesting *loop;
4688 HOST_WIDE_INT lo, hi, count;
4691 /* If the range is constant and "small", unroll the loop. */
4693 && host_integerp (lo_index, 0)
4694 && host_integerp (hi_index, 0)
4695 && (lo = tree_low_cst (lo_index, 0),
4696 hi = tree_low_cst (hi_index, 0),
4697 count = hi - lo + 1,
4698 (GET_CODE (target) != MEM
4700 || (host_integerp (TYPE_SIZE (elttype), 1)
4701 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4704 lo -= minelt; hi -= minelt;
4705 for (; lo <= hi; lo++)
4707 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4708 store_constructor_field
4709 (target, bitsize, bitpos, mode, value, type, align,
4711 TYPE_NONALIASED_COMPONENT (type)
4712 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4717 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4718 loop_top = gen_label_rtx ();
4719 loop_end = gen_label_rtx ();
4721 unsignedp = TREE_UNSIGNED (domain);
4723 index = build_decl (VAR_DECL, NULL_TREE, domain);
4725 DECL_RTL (index) = index_r
4726 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4729 if (TREE_CODE (value) == SAVE_EXPR
4730 && SAVE_EXPR_RTL (value) == 0)
4732 /* Make sure value gets expanded once before the
4734 expand_expr (value, const0_rtx, VOIDmode, 0);
4737 store_expr (lo_index, index_r, 0);
4738 loop = expand_start_loop (0);
4740 /* Assign value to element index. */
4742 = convert (ssizetype,
4743 fold (build (MINUS_EXPR, TREE_TYPE (index),
4744 index, TYPE_MIN_VALUE (domain))));
4745 position = size_binop (MULT_EXPR, position,
4747 TYPE_SIZE_UNIT (elttype)));
4749 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4750 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4751 xtarget = change_address (target, mode, addr);
4752 if (TREE_CODE (value) == CONSTRUCTOR)
4753 store_constructor (value, xtarget, align, cleared,
4754 bitsize / BITS_PER_UNIT);
4756 store_expr (value, xtarget, 0);
4758 expand_exit_loop_if_false (loop,
4759 build (LT_EXPR, integer_type_node,
4762 expand_increment (build (PREINCREMENT_EXPR,
4764 index, integer_one_node), 0, 0);
4766 emit_label (loop_end);
4769 else if ((index != 0 && ! host_integerp (index, 0))
4770 || ! host_integerp (TYPE_SIZE (elttype), 1))
4776 index = ssize_int (1);
4779 index = convert (ssizetype,
4780 fold (build (MINUS_EXPR, index,
4781 TYPE_MIN_VALUE (domain))));
4783 position = size_binop (MULT_EXPR, index,
4785 TYPE_SIZE_UNIT (elttype)));
4786 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4787 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4788 xtarget = change_address (target, mode, addr);
4789 store_expr (value, xtarget, 0);
4794 bitpos = ((tree_low_cst (index, 0) - minelt)
4795 * tree_low_cst (TYPE_SIZE (elttype), 1));
4797 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4799 store_constructor_field (target, bitsize, bitpos, mode, value,
4800 type, align, cleared,
4801 TYPE_NONALIASED_COMPONENT (type)
4802 && GET_CODE (target) == MEM
4803 ? MEM_ALIAS_SET (target) :
4804 get_alias_set (elttype));
4810 /* Set constructor assignments. */
4811 else if (TREE_CODE (type) == SET_TYPE)
4813 tree elt = CONSTRUCTOR_ELTS (exp);
4814 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4815 tree domain = TYPE_DOMAIN (type);
4816 tree domain_min, domain_max, bitlength;
4818 /* The default implementation strategy is to extract the constant
4819 parts of the constructor, use that to initialize the target,
4820 and then "or" in whatever non-constant ranges we need in addition.
4822 If a large set is all zero or all ones, it is
4823 probably better to set it using memset (if available) or bzero.
4824 Also, if a large set has just a single range, it may also be
4825 better to first clear the set (using
4826 bzero/memset), and set the bits we want. */
4828 /* Check for all zeros. */
4829 if (elt == NULL_TREE && size > 0)
4832 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4836 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4837 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4838 bitlength = size_binop (PLUS_EXPR,
4839 size_diffop (domain_max, domain_min),
4842 nbits = tree_low_cst (bitlength, 1);
4844 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4845 are "complicated" (more than one range), initialize (the
4846 constant parts) by copying from a constant. */
4847 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4848 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4850 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4851 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4852 char *bit_buffer = (char *) alloca (nbits);
4853 HOST_WIDE_INT word = 0;
4854 unsigned int bit_pos = 0;
4855 unsigned int ibit = 0;
4856 unsigned int offset = 0; /* In bytes from beginning of set. */
4858 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4861 if (bit_buffer[ibit])
4863 if (BYTES_BIG_ENDIAN)
4864 word |= (1 << (set_word_size - 1 - bit_pos));
4866 word |= 1 << bit_pos;
4870 if (bit_pos >= set_word_size || ibit == nbits)
4872 if (word != 0 || ! cleared)
4874 rtx datum = GEN_INT (word);
4877 /* The assumption here is that it is safe to use
4878 XEXP if the set is multi-word, but not if
4879 it's single-word. */
4880 if (GET_CODE (target) == MEM)
4882 to_rtx = plus_constant (XEXP (target, 0), offset);
4883 to_rtx = change_address (target, mode, to_rtx);
4885 else if (offset == 0)
4889 emit_move_insn (to_rtx, datum);
4896 offset += set_word_size / BITS_PER_UNIT;
4901 /* Don't bother clearing storage if the set is all ones. */
4902 if (TREE_CHAIN (elt) != NULL_TREE
4903 || (TREE_PURPOSE (elt) == NULL_TREE
4905 : ( ! host_integerp (TREE_VALUE (elt), 0)
4906 || ! host_integerp (TREE_PURPOSE (elt), 0)
4907 || (tree_low_cst (TREE_VALUE (elt), 0)
4908 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4909 != (HOST_WIDE_INT) nbits))))
4910 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4912 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4914 /* Start of range of element or NULL. */
4915 tree startbit = TREE_PURPOSE (elt);
4916 /* End of range of element, or element value. */
4917 tree endbit = TREE_VALUE (elt);
4918 #ifdef TARGET_MEM_FUNCTIONS
4919 HOST_WIDE_INT startb, endb;
4921 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4923 bitlength_rtx = expand_expr (bitlength,
4924 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4926 /* Handle non-range tuple element like [ expr ]. */
4927 if (startbit == NULL_TREE)
4929 startbit = save_expr (endbit);
4933 startbit = convert (sizetype, startbit);
4934 endbit = convert (sizetype, endbit);
4935 if (! integer_zerop (domain_min))
4937 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4938 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4940 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4941 EXPAND_CONST_ADDRESS);
4942 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4943 EXPAND_CONST_ADDRESS);
4949 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4952 emit_move_insn (targetx, target);
4955 else if (GET_CODE (target) == MEM)
4960 #ifdef TARGET_MEM_FUNCTIONS
4961 /* Optimization: If startbit and endbit are
4962 constants divisible by BITS_PER_UNIT,
4963 call memset instead. */
4964 if (TREE_CODE (startbit) == INTEGER_CST
4965 && TREE_CODE (endbit) == INTEGER_CST
4966 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4967 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4969 emit_library_call (memset_libfunc, LCT_NORMAL,
4971 plus_constant (XEXP (targetx, 0),
4972 startb / BITS_PER_UNIT),
4974 constm1_rtx, TYPE_MODE (integer_type_node),
4975 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4976 TYPE_MODE (sizetype));
4980 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4981 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4982 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4983 startbit_rtx, TYPE_MODE (sizetype),
4984 endbit_rtx, TYPE_MODE (sizetype));
4987 emit_move_insn (target, targetx);
4995 /* Store the value of EXP (an expression tree)
4996 into a subfield of TARGET which has mode MODE and occupies
4997 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4998 If MODE is VOIDmode, it means that we are storing into a bit-field.
5000 If VALUE_MODE is VOIDmode, return nothing in particular.
5001 UNSIGNEDP is not used in this case.
5003 Otherwise, return an rtx for the value stored. This rtx
5004 has mode VALUE_MODE if that is convenient to do.
5005 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5007 ALIGN is the alignment that TARGET is known to have.
5008 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5010 ALIAS_SET is the alias set for the destination. This value will
5011 (in general) be different from that for TARGET, since TARGET is a
5012 reference to the containing structure. */
/* Store EXP into the BITSIZE/BITPOS field of TARGET; see the block
   comment above for the full parameter contract.  NOTE(review): the
   extracted text has gaps, so some intervening statements are not
   visible here.  */
5015 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5016 unsignedp, align, total_size, alias_set)
5018 HOST_WIDE_INT bitsize;
5019 HOST_WIDE_INT bitpos;
5020 enum machine_mode mode;
5022 enum machine_mode value_mode;
5025 HOST_WIDE_INT total_size;
/* Mask covering the low BITSIZE bits; used near the end when the
   caller wants the stored value back without refetching the field.  */
5028 HOST_WIDE_INT width_mask = 0;
5030 if (TREE_CODE (exp) == ERROR_MARK)
5033 if (bitsize < HOST_BITS_PER_WIDE_INT)
5034 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5036 /* If we are storing into an unaligned field of an aligned union that is
5037 in a register, we may have the mode of TARGET being an integer mode but
5038 MODE == BLKmode. In that case, get an aligned object whose size and
5039 alignment are the same as TARGET and store TARGET into it (we can avoid
5040 the store if the field being stored is the entire width of TARGET). Then
5041 call ourselves recursively to store the field into a BLKmode version of
5042 that object. Finally, load from the object into TARGET. This is not
5043 very efficient in general, but should only be slightly more expensive
5044 than the otherwise-required unaligned accesses. Perhaps this can be
5045 cleaned up later. */
5048 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5052 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
/* BLK_OBJECT is the same temporary viewed in BLKmode.  */
5055 rtx blk_object = copy_rtx (object);
5057 PUT_MODE (blk_object, BLKmode);
/* Only copy TARGET in when the field does not cover all of it.  */
5059 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5060 emit_move_insn (object, target);
5062 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5063 align, total_size, alias_set);
5065 /* Even though we aren't returning target, we need to
5066 give it the updated value. */
5067 emit_move_insn (target, object);
5072 if (GET_CODE (target) == CONCAT)
5074 /* We're storing into a struct containing a single __complex. */
5078 return store_expr (exp, target, 0);
5081 /* If the structure is in a register or if the component
5082 is a bit field, we cannot use addressing to access it.
5083 Use bit-field techniques or SUBREG to store in it. */
5085 if (mode == VOIDmode
5086 || (mode != BLKmode && ! direct_store[(int) mode]
5087 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5088 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5089 || GET_CODE (target) == REG
5090 || GET_CODE (target) == SUBREG
5091 /* If the field isn't aligned enough to store as an ordinary memref,
5092 store it as a bit field. */
5093 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5094 && (align < GET_MODE_ALIGNMENT (mode)
5095 || bitpos % GET_MODE_ALIGNMENT (mode)))
5096 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5097 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5098 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5099 /* If the RHS and field are a constant size and the size of the
5100 RHS isn't the same size as the bitfield, we must use bitfield
5103 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5104 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
/* Bit-field store path: expand the RHS once into TEMP.  */
5106 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5108 /* If BITSIZE is narrower than the size of the type of EXP
5109 we will be narrowing TEMP. Normally, what's wanted are the
5110 low-order bits. However, if EXP's type is a record and this is
5111 big-endian machine, we want the upper BITSIZE bits. */
5112 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5113 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5114 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5115 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5116 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5120 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5122 if (mode != VOIDmode && mode != BLKmode
5123 && mode != TYPE_MODE (TREE_TYPE (exp)))
5124 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5126 /* If the modes of TARGET and TEMP are both BLKmode, both
5127 must be in memory and BITPOS must be aligned on a byte
5128 boundary. If so, we simply do a block copy. */
5129 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5131 unsigned int exp_align = expr_align (exp);
5133 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5134 || bitpos % BITS_PER_UNIT != 0)
/* Point TARGET at the byte containing the field.  */
5137 target = change_address (target, VOIDmode,
5138 plus_constant (XEXP (target, 0),
5139 bitpos / BITS_PER_UNIT));
5141 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5142 align = MIN (exp_align, align);
5144 /* Find an alignment that is consistent with the bit position. */
5145 while ((bitpos % align) != 0)
/* Copy BITSIZE bits, rounded up to whole bytes (-1 means whole EXP).  */
5148 emit_block_move (target, temp,
5149 bitsize == -1 ? expr_size (exp)
5150 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5154 return value_mode == VOIDmode ? const0_rtx : target;
5157 /* Store the value in the bitfield. */
5158 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5159 if (value_mode != VOIDmode)
5161 /* The caller wants an rtx for the value. */
5162 /* If possible, avoid refetching from the bitfield itself. */
/* TEMP can be reused directly only if the target is not volatile.  */
5164 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5167 enum machine_mode tmode;
/* Mask TEMP down to the field width when a mask is available...  */
5170 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
/* ...otherwise trim it with a left/right shift pair.  */
5171 tmode = GET_MODE (temp);
5172 if (tmode == VOIDmode)
5174 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5175 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5176 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Fallback: re-extract the just-stored bits from TARGET.  */
5178 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5179 NULL_RTX, value_mode, 0, align,
/* Ordinary memory store: build an address for just this component.  */
5186 rtx addr = XEXP (target, 0);
5189 /* If a value is wanted, it must be the lhs;
5190 so make the address stable for multiple use. */
5192 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5193 && ! CONSTANT_ADDRESS_P (addr)
5194 /* A frame-pointer reference is already stable. */
5195 && ! (GET_CODE (addr) == PLUS
5196 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5197 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5198 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5199 addr = copy_to_reg (addr);
5201 /* Now build a reference to just the desired component. */
5203 to_rtx = copy_rtx (change_address (target, mode,
5204 plus_constant (addr,
5206 / BITS_PER_UNIT))));
5207 MEM_SET_IN_STRUCT_P (to_rtx, 1);
/* Tag the component reference with the caller-supplied alias set.  */
5208 MEM_ALIAS_SET (to_rtx) = alias_set;
5210 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5214 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5215 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5216 ARRAY_REFs and find the ultimate containing object, which we return.
5218 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5219 bit position, and *PUNSIGNEDP to the signedness of the field.
5220 If the position of the field is variable, we store a tree
5221 giving the variable offset (in units) in *POFFSET.
5222 This offset is in addition to the bit position.
5223 If the position is not variable, we store 0 in *POFFSET.
5224 We set *PALIGNMENT to the alignment of the address that will be
5225 computed. This is the alignment of the thing we return if *POFFSET
5226 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5228 If any of the extraction expressions is volatile,
5229 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5231 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5232 is a mode that can be used to access the field. In that case, *PBITSIZE
5235 If the field describes a variable-sized object, *PMODE is set to
5236 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5237 this case, but the address of the object can be found. */
/* Peel nested COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF nodes off EXP and
   report the containing object's size/position/mode/alignment; see the
   block comment above for the output-parameter contract.  */
5240 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5241 punsignedp, pvolatilep, palignment)
5243 HOST_WIDE_INT *pbitsize;
5244 HOST_WIDE_INT *pbitpos;
5246 enum machine_mode *pmode;
5249 unsigned int *palignment;
5252 enum machine_mode mode = VOIDmode;
/* OFFSET accumulates the (possibly variable) byte offset and
   BIT_OFFSET the bit offset, summed over the reference chain below.  */
5253 tree offset = size_zero_node;
5254 tree bit_offset = bitsize_zero_node;
/* Start from the loosest bound and tighten as we descend.  */
5255 unsigned int alignment = BIGGEST_ALIGNMENT;
5258 /* First get the mode, signedness, and size. We do this from just the
5259 outermost expression. */
5260 if (TREE_CODE (exp) == COMPONENT_REF)
5262 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
/* A bit-field member is reported with VOIDmode (mode stays unset).  */
5263 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5264 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5266 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5268 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5270 size_tree = TREE_OPERAND (exp, 1);
5271 *punsignedp = TREE_UNSIGNED (exp);
5275 mode = TYPE_MODE (TREE_TYPE (exp));
5276 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5278 if (mode == BLKmode)
5279 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5281 *pbitsize = GET_MODE_BITSIZE (mode);
/* A non-constant size means a variable-sized object: report -1.  */
5286 if (! host_integerp (size_tree, 1))
5287 mode = BLKmode, *pbitsize = -1;
5289 *pbitsize = tree_low_cst (size_tree, 1);
5292 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5293 and find the ultimate containing object. */
5296 if (TREE_CODE (exp) == BIT_FIELD_REF)
5297 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5298 else if (TREE_CODE (exp) == COMPONENT_REF)
5300 tree field = TREE_OPERAND (exp, 1);
5301 tree this_offset = DECL_FIELD_OFFSET (field);
5303 /* If this field hasn't been filled in yet, don't go
5304 past it. This should only happen when folding expressions
5305 made during type construction. */
5306 if (this_offset == 0)
5308 else if (! TREE_CONSTANT (this_offset)
5309 && contains_placeholder_p (this_offset))
5310 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5312 offset = size_binop (PLUS_EXPR, offset, this_offset);
5313 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5314 DECL_FIELD_BIT_OFFSET (field));
/* A variable offset limits alignment to what the field guarantees.  */
5316 if (! host_integerp (offset, 0))
5317 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5320 else if (TREE_CODE (exp) == ARRAY_REF)
5322 tree index = TREE_OPERAND (exp, 1);
5323 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5324 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5325 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5327 /* We assume all arrays have sizes that are a multiple of a byte.
5328 First subtract the lower bound, if any, in the type of the
5329 index, then convert to sizetype and multiply by the size of the
5331 if (low_bound != 0 && ! integer_zerop (low_bound))
5332 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5335 /* If the index has a self-referential type, pass it to a
5336 WITH_RECORD_EXPR; if the component size is, pass our
5337 component to one. */
5338 if (! TREE_CONSTANT (index)
5339 && contains_placeholder_p (index))
5340 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5341 if (! TREE_CONSTANT (unit_size)
5342 && contains_placeholder_p (unit_size))
5343 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5344 TREE_OPERAND (exp, 0));
/* offset += index * element_size (in bytes).  */
5346 offset = size_binop (PLUS_EXPR, offset,
5347 size_binop (MULT_EXPR,
5348 convert (sizetype, index),
/* Mode-preserving NOP/CONVERT wrappers are peeled; anything else
   visible here ends the walk.  */
5352 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5353 && ! ((TREE_CODE (exp) == NOP_EXPR
5354 || TREE_CODE (exp) == CONVERT_EXPR)
5355 && (TYPE_MODE (TREE_TYPE (exp))
5356 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5359 /* If any reference in the chain is volatile, the effect is volatile. */
5360 if (TREE_THIS_VOLATILE (exp))
5363 /* If the offset is non-constant already, then we can't assume any
5364 alignment more than the alignment here. */
5365 if (! TREE_CONSTANT (offset))
5366 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* Descend one level and iterate.  */
5368 exp = TREE_OPERAND (exp, 0);
5372 alignment = MIN (alignment, DECL_ALIGN (exp));
5373 else if (TREE_TYPE (exp) != 0)
5374 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5376 /* If OFFSET is constant, see if we can return the whole thing as a
5377 constant bit position. Otherwise, split it up. */
5378 if (host_integerp (offset, 0)
5379 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5381 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5382 && host_integerp (tem, 0))
5383 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5385 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5388 *palignment = alignment;
5392 /* Subroutine of expand_exp: compute memory_usage from modifier. */
/* Map an expand modifier to the corresponding memory-usage mode
   (used by the memory-usage checking machinery; see expand_expr).  */
5394 static enum memory_use_mode
5395 get_memory_usage_from_modifier (modifier)
5396 enum expand_modifier modifier;
/* Read-only access.  */
5402 return MEMORY_USE_RO;
5404 case EXPAND_MEMORY_USE_WO:
5405 return MEMORY_USE_WO;
5407 case EXPAND_MEMORY_USE_RW:
5408 return MEMORY_USE_RW;
5410 case EXPAND_MEMORY_USE_DONT:
5411 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5412 MEMORY_USE_DONT, because they are modifiers to a call of
5413 expand_expr in the ADDR_EXPR case of expand_expr. */
5414 case EXPAND_CONST_ADDRESS:
5415 case EXPAND_INITIALIZER:
5416 return MEMORY_USE_DONT;
5417 case EXPAND_MEMORY_USE_BAD:
5423 /* Given an rtx VALUE that may contain additions and multiplications,
5424 return an equivalent value that just refers to a register or memory.
5425 This is done by generating instructions to perform the arithmetic
5426 and returning a pseudo-register containing the value.
5428 The returned value may be a REG, SUBREG, MEM or constant. */
/* Reduce VALUE (which may contain PLUS/MINUS/MULT rtx) to a REG,
   SUBREG, MEM or constant by emitting the arithmetic; TARGET is a
   suggestion for where to put the result.  See comment above.  */
5431 force_operand (value, target)
5434 register optab binoptab = 0;
5435 /* Use a temporary to force order of execution of calls to
5439 /* Use subtarget as the target for operand 0 of a binary operation. */
5440 register rtx subtarget = get_subtarget (target);
5442 /* Check for a PIC address load. */
5444 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5445 && XEXP (value, 0) == pic_offset_table_rtx
5446 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5447 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5448 || GET_CODE (XEXP (value, 1)) == CONST))
/* PIC load: move the whole expression into a fresh pseudo.  */
5451 subtarget = gen_reg_rtx (GET_MODE (value));
5452 emit_move_insn (subtarget, value);
/* Dispatch on the arithmetic code of VALUE.  */
5456 if (GET_CODE (value) == PLUS)
5457 binoptab = add_optab;
5458 else if (GET_CODE (value) == MINUS)
5459 binoptab = sub_optab;
5460 else if (GET_CODE (value) == MULT)
5462 op2 = XEXP (value, 1);
5463 if (!CONSTANT_P (op2)
5464 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Force both operands, then emit the multiply.  */
5466 tmp = force_operand (XEXP (value, 0), subtarget);
5467 return expand_mult (GET_MODE (value), tmp,
5468 force_operand (op2, NULL_RTX),
5474 op2 = XEXP (value, 1);
5475 if (!CONSTANT_P (op2)
5476 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize subtraction of a constant into addition of its
   negation.  */
5478 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5480 binoptab = add_optab;
5481 op2 = negate_rtx (GET_MODE (value), op2);
5484 /* Check for an addition with OP2 a constant integer and our first
5485 operand a PLUS of a virtual register and something else. In that
5486 case, we want to emit the sum of the virtual register and the
5487 constant first and then add the other value. This allows virtual
5488 register instantiation to simply modify the constant rather than
5489 creating another one around this addition. */
5490 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5491 && GET_CODE (XEXP (value, 0)) == PLUS
5492 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5493 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5494 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5496 rtx temp = expand_binop (GET_MODE (value), binoptab,
5497 XEXP (XEXP (value, 0), 0), op2,
5498 subtarget, 0, OPTAB_LIB_WIDEN);
5499 return expand_binop (GET_MODE (value), binoptab, temp,
5500 force_operand (XEXP (XEXP (value, 0), 1), 0),
5501 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0 into SUBTARGET, then emit.  */
5504 tmp = force_operand (XEXP (value, 0), subtarget);
5505 return expand_binop (GET_MODE (value), binoptab, tmp,
5506 force_operand (op2, NULL_RTX),
5507 target, 0, OPTAB_LIB_WIDEN);
5508 /* We give UNSIGNEDP = 0 to expand_binop
5509 because the only operations we are expanding here are signed ones. */
5514 /* Subroutine of expand_expr:
5515 save the non-copied parts (LIST) of an expr (LHS), and return a list
5516 which can restore these values to their previous values,
5517 should something modify their storage. */
/* Save the non-copied parts (LIST) of LHS into fresh temporaries and
   return a TREE_LIST of (part, RTL_EXPR holding the saved value) pairs
   that can later restore them; see the comment above.  */
5520 save_noncopied_parts (lhs, list)
/* LIST may nest: recurse into sub-lists, otherwise save one part.  */
5527 for (tail = list; tail; tail = TREE_CHAIN (tail))
5528 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5529 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5532 tree part = TREE_VALUE (tail);
5533 tree part_type = TREE_TYPE (part);
/* Reference the part as a field of LHS.  */
5534 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
/* Allocate a const-qualified temporary to hold the saved copy.  */
5536 = assign_temp (build_qualified_type (part_type,
5537 (TYPE_QUALS (part_type)
5538 | TYPE_QUAL_CONST)),
5541 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5542 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5543 parts = tree_cons (to_be_saved,
5544 build (RTL_EXPR, part_type, NULL_TREE,
/* Copy the current value of the part into its temporary.  */
5547 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5552 /* Subroutine of expand_expr:
5553 record the non-copied parts (LIST) of an expr (LHS), and return a list
5554 which specifies the initial values of these parts. */
/* Record the non-copied parts (LIST) of LHS and return a TREE_LIST
   pairing each initial value with a COMPONENT_REF of the part in LHS;
   see the comment above.  */
5557 init_noncopied_parts (lhs, list)
/* LIST may nest: recurse into sub-lists, otherwise record one part.  */
5564 for (tail = list; tail; tail = TREE_CHAIN (tail))
5565 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5566 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5567 else if (TREE_PURPOSE (tail))
5569 tree part = TREE_VALUE (tail);
5570 tree part_type = TREE_TYPE (part);
5571 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5572 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5577 /* Subroutine of expand_expr: return nonzero iff there is no way that
5578 EXP can reference X, which is being modified. TOP_P is nonzero if this
5579 call is going to be used to determine whether we need a temporary
5580 for EXP, as opposed to a recursive call to this function.
5582 It is always safe for this routine to return zero since it merely
5583 searches for optimization opportunities. */
/* Return nonzero iff evaluating EXP cannot reference X (which is being
   modified); returning zero is always safe.  See the comment above.  */
5586 safe_from_p (x, exp, top_p)
/* SAVE_EXPRs marked TREE_PRIVATE during this walk; cleared at exit.  */
5593 static tree save_expr_list;
5596 /* If EXP has varying size, we MUST use a target since we currently
5597 have no way of allocating temporaries of variable size
5598 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5599 So we assume here that something at a higher level has prevented a
5600 clash. This is somewhat bogus, but the best we can do. Only
5601 do this when X is BLKmode and when we are at the top level. */
5602 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5603 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5604 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5605 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5606 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5608 && GET_MODE (x) == BLKmode)
5609 /* If X is in the outgoing argument area, it is always safe. */
5610 || (GET_CODE (x) == MEM
5611 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5612 || (GET_CODE (XEXP (x, 0)) == PLUS
5613 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5616 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5617 find the underlying pseudo. */
5618 if (GET_CODE (x) == SUBREG)
5621 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5625 /* A SAVE_EXPR might appear many times in the expression passed to the
5626 top-level safe_from_p call, and if it has a complex subexpression,
5627 examining it multiple times could result in a combinatorial explosion.
5628 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5629 with optimization took about 28 minutes to compile -- even though it was
5630 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5631 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5632 we have processed. Note that the only test of top_p was above. */
5641 rtn = safe_from_p (x, exp, 0);
/* Clear the TREE_PRIVATE marks set on SAVE_EXPRs during the walk.  */
5643 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5644 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5649 /* Now look at our tree code and possibly recurse. */
5650 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5653 exp_rtl = DECL_RTL (exp);
/* Expression-less nodes: lists are safe iff both halves are.  */
5660 if (TREE_CODE (exp) == TREE_LIST)
5661 return ((TREE_VALUE (exp) == 0
5662 || safe_from_p (x, TREE_VALUE (exp), 0))
5663 && (TREE_CHAIN (exp) == 0
5664 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5665 else if (TREE_CODE (exp) == ERROR_MARK)
5666 return 1; /* An already-visited SAVE_EXPR? */
/* Unary: recurse into the single operand.  */
5671 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
/* Binary: both operands must be safe.  */
5675 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5676 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5680 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5681 the expression. If it is set, we conflict iff we are that rtx or
5682 both are in memory. Otherwise, we check all operands of the
5683 expression recursively. */
5685 switch (TREE_CODE (exp))
5688 return (staticp (TREE_OPERAND (exp, 0))
5689 || TREE_STATIC (exp)
5690 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
/* Memory references conflict if their alias sets may overlap.  */
5693 if (GET_CODE (x) == MEM
5694 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5695 get_alias_set (exp)))
5700 /* Assume that the call will clobber all hard registers and
5702 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5703 || GET_CODE (x) == MEM)
5708 /* If a sequence exists, we would have to scan every instruction
5709 in the sequence to see if it was safe. This is probably not
5711 if (RTL_EXPR_SEQUENCE (exp))
5714 exp_rtl = RTL_EXPR_RTL (exp);
5717 case WITH_CLEANUP_EXPR:
5718 exp_rtl = RTL_EXPR_RTL (exp);
5721 case CLEANUP_POINT_EXPR:
5722 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5725 exp_rtl = SAVE_EXPR_RTL (exp);
5729 /* If we've already scanned this, don't do it again. Otherwise,
5730 show we've scanned it and record for clearing the flag if we're
5732 if (TREE_PRIVATE (exp))
5735 TREE_PRIVATE (exp) = 1;
5736 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5738 TREE_PRIVATE (exp) = 0;
/* Remember this SAVE_EXPR so the top-level call can unmark it.  */
5742 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5746 /* The only operand we look at is operand 1. The rest aren't
5747 part of the expression. */
5748 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5750 case METHOD_CALL_EXPR:
5751 /* This takes a rtx argument, but shouldn't appear here. */
5758 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse over the rtl-relevant operands of EXP.  */
5762 nops = first_rtl_op (TREE_CODE (exp));
5763 for (i = 0; i < nops; i++)
5764 if (TREE_OPERAND (exp, i) != 0
5765 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5768 /* If this is a language-specific tree code, it may require
5769 special handling. */
5770 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5772 && !(*lang_safe_from_p) (x, exp))
5776 /* If we have an rtl, find any enclosed object. Then see if we conflict
/* Strip SUBREGs; a hard-register SUBREG is handled conservatively.  */
5780 if (GET_CODE (exp_rtl) == SUBREG)
5782 exp_rtl = SUBREG_REG (exp_rtl);
5783 if (GET_CODE (exp_rtl) == REG
5784 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5788 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5789 are memory and they conflict. */
5790 return ! (rtx_equal_p (x, exp_rtl)
5791 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5792 && true_dependence (exp_rtl, GET_MODE (x), x,
5793 rtx_addr_varies_p)));
5796 /* If we reach here, it is safe. */
5800 /* Subroutine of expand_expr: return nonzero iff EXP is an
5801 expression whose type is statically determinable. */
/* These are the codes treated as having a statically determinable
   type (per the comment above); the surrounding function header is
   not visible in this extract.  */
5807 if (TREE_CODE (exp) == PARM_DECL
5808 || TREE_CODE (exp) == VAR_DECL
5809 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5810 || TREE_CODE (exp) == COMPONENT_REF
5811 || TREE_CODE (exp) == ARRAY_REF)
5816 /* Subroutine of expand_expr: return rtx if EXP is a
5817 variable or parameter; else return 0. */
5824 switch (TREE_CODE (exp))
/* Variable or parameter (per the comment above): its DECL_RTL.  */
5828 return DECL_RTL (exp);
5834 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Abort compilation (via fatal) if EXP performs integer arithmetic in
   a mode wider than MAX_INTEGER_COMPUTATION_MODE.  Checks the result
   type and every operand of unary/binary/relational operations.  */
5836 check_max_integer_computation_mode (exp)
5839 enum tree_code code;
5840 enum machine_mode mode;
5842 /* Strip any NOPs that don't change the mode. */
5844 code = TREE_CODE (exp);
5846 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5847 if (code == NOP_EXPR
5848 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5851 /* First check the type of the overall operation. We need only look at
5852 unary, binary and relational operations. */
5853 if (TREE_CODE_CLASS (code) == '1'
5854 || TREE_CODE_CLASS (code) == '2'
5855 || TREE_CODE_CLASS (code) == '<')
5857 mode = TYPE_MODE (TREE_TYPE (exp));
5858 if (GET_MODE_CLASS (mode) == MODE_INT
5859 && mode > MAX_INTEGER_COMPUTATION_MODE)
5860 fatal ("unsupported wide integer operation");
5863 /* Check operand of a unary op. */
5864 if (TREE_CODE_CLASS (code) == '1')
5866 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5867 if (GET_MODE_CLASS (mode) == MODE_INT
5868 && mode > MAX_INTEGER_COMPUTATION_MODE)
5869 fatal ("unsupported wide integer operation");
5872 /* Check operands of a binary/comparison op. */
5873 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5875 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5876 if (GET_MODE_CLASS (mode) == MODE_INT
5877 && mode > MAX_INTEGER_COMPUTATION_MODE)
5878 fatal ("unsupported wide integer operation");
5880 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5881 if (GET_MODE_CLASS (mode) == MODE_INT
5882 && mode > MAX_INTEGER_COMPUTATION_MODE)
5883 fatal ("unsupported wide integer operation");
5888 /* expand_expr: generate code for computing expression EXP.
5889 An rtx for the computed value is returned. The value is never null.
5890 In the case of a void EXP, const0_rtx is returned.
5892 The value may be stored in TARGET if TARGET is nonzero.
5893 TARGET is just a suggestion; callers must assume that
5894 the rtx returned may not be the same as TARGET.
5896 If TARGET is CONST0_RTX, it means that the value will be ignored.
5898 If TMODE is not VOIDmode, it suggests generating the
5899 result in mode TMODE. But this is done only when convenient.
5900 Otherwise, TMODE is ignored and the value generated in its natural mode.
5901 TMODE is just a suggestion; callers must assume that
5902 the rtx returned may not have mode TMODE.
5904 Note that TARGET may have neither TMODE nor MODE. In that case, it
5905 probably will not be used.
5907 If MODIFIER is EXPAND_SUM then when EXP is an addition
5908 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5909 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5910 products as above, or REG or MEM, or constant.
5911 Ordinarily in such cases we would output mul or add instructions
5912 and then return a pseudo reg containing the sum.
5914 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5915 it also marks a label as absolutely required (it can't be dead).
5916 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5917 This is used for outputting expressions used in initializers.
5919 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5920 with a constant address even if that address is not normally legitimate.
5921 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5924 expand_expr (exp, target, tmode, modifier)
5927 enum machine_mode tmode;
5928 enum expand_modifier modifier;
5930 register rtx op0, op1, temp;
5931 tree type = TREE_TYPE (exp);
5932 int unsignedp = TREE_UNSIGNED (type);
5933 register enum machine_mode mode;
5934 register enum tree_code code = TREE_CODE (exp);
5936 rtx subtarget, original_target;
5939 /* Used by check-memory-usage to make modifier read only. */
5940 enum expand_modifier ro_modifier;
5942 /* Handle ERROR_MARK before anybody tries to access its type. */
5943 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5945 op0 = CONST0_RTX (tmode);
5951 mode = TYPE_MODE (type);
5952 /* Use subtarget as the target for operand 0 of a binary operation. */
5953 subtarget = get_subtarget (target);
5954 original_target = target;
5955 ignore = (target == const0_rtx
5956 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5957 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5958 || code == COND_EXPR)
5959 && TREE_CODE (type) == VOID_TYPE));
5961 /* Make a read-only version of the modifier. */
5962 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5963 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5964 ro_modifier = modifier;
5966 ro_modifier = EXPAND_NORMAL;
5968 /* If we are going to ignore this result, we need only do something
5969 if there is a side-effect somewhere in the expression. If there
5970 is, short-circuit the most common cases here. Note that we must
5971 not call expand_expr with anything but const0_rtx in case this
5972 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5976 if (! TREE_SIDE_EFFECTS (exp))
5979 /* Ensure we reference a volatile object even if value is ignored, but
5980 don't do this if all we are doing is taking its address. */
5981 if (TREE_THIS_VOLATILE (exp)
5982 && TREE_CODE (exp) != FUNCTION_DECL
5983 && mode != VOIDmode && mode != BLKmode
5984 && modifier != EXPAND_CONST_ADDRESS)
5986 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5987 if (GET_CODE (temp) == MEM)
5988 temp = copy_to_reg (temp);
5992 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5993 || code == INDIRECT_REF || code == BUFFER_REF)
5994 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5995 VOIDmode, ro_modifier);
5996 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5997 || code == ARRAY_REF)
5999 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6000 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6003 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6004 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6005 /* If the second operand has no side effects, just evaluate
6007 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6008 VOIDmode, ro_modifier);
6009 else if (code == BIT_FIELD_REF)
6011 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6012 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6013 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6020 #ifdef MAX_INTEGER_COMPUTATION_MODE
6021 /* Only check stuff here if the mode we want is different from the mode
6022 of the expression; if it's the same, check_max_integer_computiation_mode
6023 will handle it. Do we really need to check this stuff at all? */
6026 && GET_MODE (target) != mode
6027 && TREE_CODE (exp) != INTEGER_CST
6028 && TREE_CODE (exp) != PARM_DECL
6029 && TREE_CODE (exp) != ARRAY_REF
6030 && TREE_CODE (exp) != COMPONENT_REF
6031 && TREE_CODE (exp) != BIT_FIELD_REF
6032 && TREE_CODE (exp) != INDIRECT_REF
6033 && TREE_CODE (exp) != CALL_EXPR
6034 && TREE_CODE (exp) != VAR_DECL
6035 && TREE_CODE (exp) != RTL_EXPR)
6037 enum machine_mode mode = GET_MODE (target);
6039 if (GET_MODE_CLASS (mode) == MODE_INT
6040 && mode > MAX_INTEGER_COMPUTATION_MODE)
6041 fatal ("unsupported wide integer operation");
6045 && TREE_CODE (exp) != INTEGER_CST
6046 && TREE_CODE (exp) != PARM_DECL
6047 && TREE_CODE (exp) != ARRAY_REF
6048 && TREE_CODE (exp) != COMPONENT_REF
6049 && TREE_CODE (exp) != BIT_FIELD_REF
6050 && TREE_CODE (exp) != INDIRECT_REF
6051 && TREE_CODE (exp) != VAR_DECL
6052 && TREE_CODE (exp) != CALL_EXPR
6053 && TREE_CODE (exp) != RTL_EXPR
6054 && GET_MODE_CLASS (tmode) == MODE_INT
6055 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6056 fatal ("unsupported wide integer operation");
6058 check_max_integer_computation_mode (exp);
6061 /* If will do cse, generate all results into pseudo registers
6062 since 1) that allows cse to find more things
6063 and 2) otherwise cse could produce an insn the machine
6066 if (! cse_not_expected && mode != BLKmode && target
6067 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6074 tree function = decl_function_context (exp);
6075 /* Handle using a label in a containing function. */
6076 if (function != current_function_decl
6077 && function != inline_function_decl && function != 0)
6079 struct function *p = find_function_data (function);
6080 p->expr->x_forced_labels
6081 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6082 p->expr->x_forced_labels);
6086 if (modifier == EXPAND_INITIALIZER)
6087 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6092 temp = gen_rtx_MEM (FUNCTION_MODE,
6093 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6094 if (function != current_function_decl
6095 && function != inline_function_decl && function != 0)
6096 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6101 if (DECL_RTL (exp) == 0)
6103 error_with_decl (exp, "prior parameter's size depends on `%s'");
6104 return CONST0_RTX (mode);
6107 /* ... fall through ... */
6110 /* If a static var's type was incomplete when the decl was written,
6111 but the type is complete now, lay out the decl now. */
6112 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6113 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6115 layout_decl (exp, 0);
6116 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6119 /* Although static-storage variables start off initialized, according to
6120 ANSI C, a memcpy could overwrite them with uninitialized values. So
6121 we check them too. This also lets us check for read-only variables
6122 accessed via a non-const declaration, in case it won't be detected
6123 any other way (e.g., in an embedded system or OS kernel without
6126 Aggregates are not checked here; they're handled elsewhere. */
6127 if (cfun && current_function_check_memory_usage
6129 && GET_CODE (DECL_RTL (exp)) == MEM
6130 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6132 enum memory_use_mode memory_usage;
6133 memory_usage = get_memory_usage_from_modifier (modifier);
6135 in_check_memory_usage = 1;
6136 if (memory_usage != MEMORY_USE_DONT)
6137 emit_library_call (chkr_check_addr_libfunc,
6138 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6139 XEXP (DECL_RTL (exp), 0), Pmode,
6140 GEN_INT (int_size_in_bytes (type)),
6141 TYPE_MODE (sizetype),
6142 GEN_INT (memory_usage),
6143 TYPE_MODE (integer_type_node));
6144 in_check_memory_usage = 0;
6147 /* ... fall through ... */
6151 if (DECL_RTL (exp) == 0)
6154 /* Ensure variable marked as used even if it doesn't go through
6155 a parser. If it hasn't be used yet, write out an external
6157 if (! TREE_USED (exp))
6159 assemble_external (exp);
6160 TREE_USED (exp) = 1;
6163 /* Show we haven't gotten RTL for this yet. */
6166 /* Handle variables inherited from containing functions. */
6167 context = decl_function_context (exp);
6169 /* We treat inline_function_decl as an alias for the current function
6170 because that is the inline function whose vars, types, etc.
6171 are being merged into the current function.
6172 See expand_inline_function. */
6174 if (context != 0 && context != current_function_decl
6175 && context != inline_function_decl
6176 /* If var is static, we don't need a static chain to access it. */
6177 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6178 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6182 /* Mark as non-local and addressable. */
6183 DECL_NONLOCAL (exp) = 1;
6184 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6186 mark_addressable (exp);
6187 if (GET_CODE (DECL_RTL (exp)) != MEM)
6189 addr = XEXP (DECL_RTL (exp), 0);
6190 if (GET_CODE (addr) == MEM)
6191 addr = change_address (addr, Pmode,
6192 fix_lexical_addr (XEXP (addr, 0), exp));
6194 addr = fix_lexical_addr (addr, exp);
6196 temp = change_address (DECL_RTL (exp), mode, addr);
6199 /* This is the case of an array whose size is to be determined
6200 from its initializer, while the initializer is still being parsed.
6203 else if (GET_CODE (DECL_RTL (exp)) == MEM
6204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6205 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6206 XEXP (DECL_RTL (exp), 0));
6208 /* If DECL_RTL is memory, we are in the normal case and either
6209 the address is not valid or it is not a register and -fforce-addr
6210 is specified, get the address into a register. */
6212 else if (GET_CODE (DECL_RTL (exp)) == MEM
6213 && modifier != EXPAND_CONST_ADDRESS
6214 && modifier != EXPAND_SUM
6215 && modifier != EXPAND_INITIALIZER
6216 && (! memory_address_p (DECL_MODE (exp),
6217 XEXP (DECL_RTL (exp), 0))
6219 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6220 temp = change_address (DECL_RTL (exp), VOIDmode,
6221 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6223 /* If we got something, return it. But first, set the alignment
6224 the address is a register. */
6227 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6228 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6233 /* If the mode of DECL_RTL does not match that of the decl, it
6234 must be a promoted value. We return a SUBREG of the wanted mode,
6235 but mark it so that we know that it was already extended. */
6237 if (GET_CODE (DECL_RTL (exp)) == REG
6238 && GET_MODE (DECL_RTL (exp)) != mode)
6240 /* Get the signedness used for this variable. Ensure we get the
6241 same mode we got when the variable was declared. */
6242 if (GET_MODE (DECL_RTL (exp))
6243 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6246 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6247 SUBREG_PROMOTED_VAR_P (temp) = 1;
6248 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6252 return DECL_RTL (exp);
6255 return immed_double_const (TREE_INT_CST_LOW (exp),
6256 TREE_INT_CST_HIGH (exp), mode);
6259 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6260 EXPAND_MEMORY_USE_BAD);
6263 /* If optimized, generate immediate CONST_DOUBLE
6264 which will be turned into memory by reload if necessary.
6266 We used to force a register so that loop.c could see it. But
6267 this does not allow gen_* patterns to perform optimizations with
6268 the constants. It also produces two insns in cases like "x = 1.0;".
6269 On most machines, floating-point constants are not permitted in
6270 many insns, so we'd end up copying it to a register in any case.
6272 Now, we do the copying in expand_binop, if appropriate. */
6273 return immed_real_const (exp);
6277 if (! TREE_CST_RTL (exp))
6278 output_constant_def (exp, 1);
6280 /* TREE_CST_RTL probably contains a constant address.
6281 On RISC machines where a constant address isn't valid,
6282 make some insns to get that address into a register. */
6283 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6284 && modifier != EXPAND_CONST_ADDRESS
6285 && modifier != EXPAND_INITIALIZER
6286 && modifier != EXPAND_SUM
6287 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6289 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6290 return change_address (TREE_CST_RTL (exp), VOIDmode,
6291 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6292 return TREE_CST_RTL (exp);
6294 case EXPR_WITH_FILE_LOCATION:
6297 const char *saved_input_filename = input_filename;
6298 int saved_lineno = lineno;
6299 input_filename = EXPR_WFL_FILENAME (exp);
6300 lineno = EXPR_WFL_LINENO (exp);
6301 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6302 emit_line_note (input_filename, lineno);
6303 /* Possibly avoid switching back and force here. */
6304 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6305 input_filename = saved_input_filename;
6306 lineno = saved_lineno;
6311 context = decl_function_context (exp);
6313 /* If this SAVE_EXPR was at global context, assume we are an
6314 initialization function and move it into our context. */
6316 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6318 /* We treat inline_function_decl as an alias for the current function
6319 because that is the inline function whose vars, types, etc.
6320 are being merged into the current function.
6321 See expand_inline_function. */
6322 if (context == current_function_decl || context == inline_function_decl)
6325 /* If this is non-local, handle it. */
6328 /* The following call just exists to abort if the context is
6329 not of a containing function. */
6330 find_function_data (context);
6332 temp = SAVE_EXPR_RTL (exp);
6333 if (temp && GET_CODE (temp) == REG)
6335 put_var_into_stack (exp);
6336 temp = SAVE_EXPR_RTL (exp);
6338 if (temp == 0 || GET_CODE (temp) != MEM)
6340 return change_address (temp, mode,
6341 fix_lexical_addr (XEXP (temp, 0), exp));
6343 if (SAVE_EXPR_RTL (exp) == 0)
6345 if (mode == VOIDmode)
6348 temp = assign_temp (build_qualified_type (type,
6350 | TYPE_QUAL_CONST)),
6353 SAVE_EXPR_RTL (exp) = temp;
6354 if (!optimize && GET_CODE (temp) == REG)
6355 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6358 /* If the mode of TEMP does not match that of the expression, it
6359 must be a promoted value. We pass store_expr a SUBREG of the
6360 wanted mode but mark it so that we know that it was already
6361 extended. Note that `unsignedp' was modified above in
6364 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6366 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6367 SUBREG_PROMOTED_VAR_P (temp) = 1;
6368 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6371 if (temp == const0_rtx)
6372 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6373 EXPAND_MEMORY_USE_BAD);
6375 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6377 TREE_USED (exp) = 1;
6380 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6381 must be a promoted value. We return a SUBREG of the wanted mode,
6382 but mark it so that we know that it was already extended. */
6384 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6385 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6387 /* Compute the signedness and make the proper SUBREG. */
6388 promote_mode (type, mode, &unsignedp, 0);
6389 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6390 SUBREG_PROMOTED_VAR_P (temp) = 1;
6391 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6395 return SAVE_EXPR_RTL (exp);
6400 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6401 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6405 case PLACEHOLDER_EXPR:
6407 tree placeholder_expr;
6409 /* If there is an object on the head of the placeholder list,
6410 see if some object in it of type TYPE or a pointer to it. For
6411 further information, see tree.def. */
6412 for (placeholder_expr = placeholder_list;
6413 placeholder_expr != 0;
6414 placeholder_expr = TREE_CHAIN (placeholder_expr))
6416 tree need_type = TYPE_MAIN_VARIANT (type);
6418 tree old_list = placeholder_list;
6421 /* Find the outermost reference that is of the type we want.
6422 If none, see if any object has a type that is a pointer to
6423 the type we want. */
6424 for (elt = TREE_PURPOSE (placeholder_expr);
6425 elt != 0 && object == 0;
6427 = ((TREE_CODE (elt) == COMPOUND_EXPR
6428 || TREE_CODE (elt) == COND_EXPR)
6429 ? TREE_OPERAND (elt, 1)
6430 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6431 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6432 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6433 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6434 ? TREE_OPERAND (elt, 0) : 0))
6435 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6438 for (elt = TREE_PURPOSE (placeholder_expr);
6439 elt != 0 && object == 0;
6441 = ((TREE_CODE (elt) == COMPOUND_EXPR
6442 || TREE_CODE (elt) == COND_EXPR)
6443 ? TREE_OPERAND (elt, 1)
6444 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6445 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6446 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6447 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6448 ? TREE_OPERAND (elt, 0) : 0))
6449 if (POINTER_TYPE_P (TREE_TYPE (elt))
6450 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6452 object = build1 (INDIRECT_REF, need_type, elt);
6456 /* Expand this object skipping the list entries before
6457 it was found in case it is also a PLACEHOLDER_EXPR.
6458 In that case, we want to translate it using subsequent
6460 placeholder_list = TREE_CHAIN (placeholder_expr);
6461 temp = expand_expr (object, original_target, tmode,
6463 placeholder_list = old_list;
6469 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6472 case WITH_RECORD_EXPR:
6473 /* Put the object on the placeholder list, expand our first operand,
6474 and pop the list. */
6475 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6477 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6478 tmode, ro_modifier);
6479 placeholder_list = TREE_CHAIN (placeholder_list);
6483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6484 expand_goto (TREE_OPERAND (exp, 0));
6486 expand_computed_goto (TREE_OPERAND (exp, 0));
6490 expand_exit_loop_if_false (NULL_PTR,
6491 invert_truthvalue (TREE_OPERAND (exp, 0)));
6494 case LABELED_BLOCK_EXPR:
6495 if (LABELED_BLOCK_BODY (exp))
6496 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6497 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6500 case EXIT_BLOCK_EXPR:
6501 if (EXIT_BLOCK_RETURN (exp))
6502 sorry ("returned value in block_exit_expr");
6503 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6508 expand_start_loop (1);
6509 expand_expr_stmt (TREE_OPERAND (exp, 0));
6517 tree vars = TREE_OPERAND (exp, 0);
6518 int vars_need_expansion = 0;
6520 /* Need to open a binding contour here because
6521 if there are any cleanups they must be contained here. */
6522 expand_start_bindings (2);
6524 /* Mark the corresponding BLOCK for output in its proper place. */
6525 if (TREE_OPERAND (exp, 2) != 0
6526 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6527 insert_block (TREE_OPERAND (exp, 2));
6529 /* If VARS have not yet been expanded, expand them now. */
6532 if (DECL_RTL (vars) == 0)
6534 vars_need_expansion = 1;
6537 expand_decl_init (vars);
6538 vars = TREE_CHAIN (vars);
6541 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6543 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6549 if (RTL_EXPR_SEQUENCE (exp))
6551 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6553 emit_insns (RTL_EXPR_SEQUENCE (exp));
6554 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6556 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6557 free_temps_for_rtl_expr (exp);
6558 return RTL_EXPR_RTL (exp);
6561 /* If we don't need the result, just ensure we evaluate any
6566 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6567 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6568 EXPAND_MEMORY_USE_BAD);
6572 /* All elts simple constants => refer to a constant in memory. But
6573 if this is a non-BLKmode mode, let it store a field at a time
6574 since that should make a CONST_INT or CONST_DOUBLE when we
6575 fold. Likewise, if we have a target we can use, it is best to
6576 store directly into the target unless the type is large enough
6577 that memcpy will be used. If we are making an initializer and
6578 all operands are constant, put it in memory as well. */
6579 else if ((TREE_STATIC (exp)
6580 && ((mode == BLKmode
6581 && ! (target != 0 && safe_from_p (target, exp, 1)))
6582 || TREE_ADDRESSABLE (exp)
6583 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6584 && (! MOVE_BY_PIECES_P
6585 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6587 && ! mostly_zeros_p (exp))))
6588 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6590 rtx constructor = output_constant_def (exp, 1);
6592 if (modifier != EXPAND_CONST_ADDRESS
6593 && modifier != EXPAND_INITIALIZER
6594 && modifier != EXPAND_SUM
6595 && (! memory_address_p (GET_MODE (constructor),
6596 XEXP (constructor, 0))
6598 && GET_CODE (XEXP (constructor, 0)) != REG)))
6599 constructor = change_address (constructor, VOIDmode,
6600 XEXP (constructor, 0));
6605 /* Handle calls that pass values in multiple non-contiguous
6606 locations. The Irix 6 ABI has examples of this. */
6607 if (target == 0 || ! safe_from_p (target, exp, 1)
6608 || GET_CODE (target) == PARALLEL)
6610 = assign_temp (build_qualified_type (type,
6612 | (TREE_READONLY (exp)
6613 * TYPE_QUAL_CONST))),
6614 TREE_ADDRESSABLE (exp), 1, 1);
6616 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6617 int_size_in_bytes (TREE_TYPE (exp)));
6623 tree exp1 = TREE_OPERAND (exp, 0);
6625 tree string = string_constant (exp1, &index);
6627 /* Try to optimize reads from const strings. */
6629 && TREE_CODE (string) == STRING_CST
6630 && TREE_CODE (index) == INTEGER_CST
6631 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6632 && GET_MODE_CLASS (mode) == MODE_INT
6633 && GET_MODE_SIZE (mode) == 1
6634 && modifier != EXPAND_MEMORY_USE_WO)
6636 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6638 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6639 op0 = memory_address (mode, op0);
6641 if (cfun && current_function_check_memory_usage
6642 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6644 enum memory_use_mode memory_usage;
6645 memory_usage = get_memory_usage_from_modifier (modifier);
6647 if (memory_usage != MEMORY_USE_DONT)
6649 in_check_memory_usage = 1;
6650 emit_library_call (chkr_check_addr_libfunc,
6651 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6652 Pmode, GEN_INT (int_size_in_bytes (type)),
6653 TYPE_MODE (sizetype),
6654 GEN_INT (memory_usage),
6655 TYPE_MODE (integer_type_node));
6656 in_check_memory_usage = 0;
6660 temp = gen_rtx_MEM (mode, op0);
6661 set_mem_attributes (temp, exp, 0);
6663 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6664 here, because, in C and C++, the fact that a location is accessed
6665 through a pointer to const does not mean that the value there can
6666 never change. Languages where it can never change should
6667 also set TREE_STATIC. */
6668 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6670 /* If we are writing to this object and its type is a record with
6671 readonly fields, we must mark it as readonly so it will
6672 conflict with readonly references to those fields. */
6673 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6674 RTX_UNCHANGING_P (temp) = 1;
6680 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6684 tree array = TREE_OPERAND (exp, 0);
6685 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6686 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6687 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6690 /* Optimize the special-case of a zero lower bound.
6692 We convert the low_bound to sizetype to avoid some problems
6693 with constant folding. (E.g. suppose the lower bound is 1,
6694 and its mode is QI. Without the conversion, (ARRAY
6695 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6696 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6698 if (! integer_zerop (low_bound))
6699 index = size_diffop (index, convert (sizetype, low_bound));
6701 /* Fold an expression like: "foo"[2].
6702 This is not done in fold so it won't happen inside &.
6703 Don't fold if this is for wide characters since it's too
6704 difficult to do correctly and this is a very rare case. */
6706 if (TREE_CODE (array) == STRING_CST
6707 && TREE_CODE (index) == INTEGER_CST
6708 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6709 && GET_MODE_CLASS (mode) == MODE_INT
6710 && GET_MODE_SIZE (mode) == 1)
6712 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6714 /* If this is a constant index into a constant array,
6715 just get the value from the array. Handle both the cases when
6716 we have an explicit constructor and when our operand is a variable
6717 that was declared const. */
6719 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6720 && TREE_CODE (index) == INTEGER_CST
6721 && 0 > compare_tree_int (index,
6722 list_length (CONSTRUCTOR_ELTS
6723 (TREE_OPERAND (exp, 0)))))
6727 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6728 i = TREE_INT_CST_LOW (index);
6729 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6733 return expand_expr (fold (TREE_VALUE (elem)), target,
6734 tmode, ro_modifier);
6737 else if (optimize >= 1
6738 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6739 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6740 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6742 if (TREE_CODE (index) == INTEGER_CST)
6744 tree init = DECL_INITIAL (array);
6746 if (TREE_CODE (init) == CONSTRUCTOR)
6750 for (elem = CONSTRUCTOR_ELTS (init);
6752 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6753 elem = TREE_CHAIN (elem))
6757 return expand_expr (fold (TREE_VALUE (elem)), target,
6758 tmode, ro_modifier);
6760 else if (TREE_CODE (init) == STRING_CST
6761 && 0 > compare_tree_int (index,
6762 TREE_STRING_LENGTH (init)))
6764 tree type = TREE_TYPE (TREE_TYPE (init));
6765 enum machine_mode mode = TYPE_MODE (type);
6767 if (GET_MODE_CLASS (mode) == MODE_INT
6768 && GET_MODE_SIZE (mode) == 1)
6770 (TREE_STRING_POINTER
6771 (init)[TREE_INT_CST_LOW (index)]));
6780 /* If the operand is a CONSTRUCTOR, we can just extract the
6781 appropriate field if it is present. Don't do this if we have
6782 already written the data since we want to refer to that copy
6783 and varasm.c assumes that's what we'll do. */
6784 if (code != ARRAY_REF
6785 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6786 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6790 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6791 elt = TREE_CHAIN (elt))
6792 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6793 /* We can normally use the value of the field in the
6794 CONSTRUCTOR. However, if this is a bitfield in
6795 an integral mode that we can fit in a HOST_WIDE_INT,
6796 we must mask only the number of bits in the bitfield,
6797 since this is done implicitly by the constructor. If
6798 the bitfield does not meet either of those conditions,
6799 we can't do this optimization. */
6800 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6801 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6803 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6804 <= HOST_BITS_PER_WIDE_INT))))
6806 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6807 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6809 HOST_WIDE_INT bitsize
6810 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6812 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6814 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6815 op0 = expand_and (op0, op1, target);
6819 enum machine_mode imode
6820 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6822 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6825 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6827 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6837 enum machine_mode mode1;
6838 HOST_WIDE_INT bitsize, bitpos;
6841 unsigned int alignment;
6842 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6843 &mode1, &unsignedp, &volatilep,
6846 /* If we got back the original object, something is wrong. Perhaps
6847 we are evaluating an expression too early. In any event, don't
6848 infinitely recurse. */
6852 /* If TEM's type is a union of variable size, pass TARGET to the inner
6853 computation, since it will need a temporary and TARGET is known
6854 to have to do. This occurs in unchecked conversion in Ada. */
6856 op0 = expand_expr (tem,
6857 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6858 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6860 ? target : NULL_RTX),
6862 (modifier == EXPAND_INITIALIZER
6863 || modifier == EXPAND_CONST_ADDRESS)
6864 ? modifier : EXPAND_NORMAL);
6866 /* If this is a constant, put it into a register if it is a
6867 legitimate constant and OFFSET is 0 and memory if it isn't. */
6868 if (CONSTANT_P (op0))
6870 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6871 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6873 op0 = force_reg (mode, op0);
6875 op0 = validize_mem (force_const_mem (mode, op0));
6880 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6882 /* If this object is in memory, put it into a register.
6883 This case can't occur in C, but can in Ada if we have
6884 unchecked conversion of an expression from a scalar type to
6885 an array or record type. */
6886 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6887 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6889 tree nt = build_qualified_type (TREE_TYPE (tem),
6890 (TYPE_QUALS (TREE_TYPE (tem))
6891 | TYPE_QUAL_CONST));
6892 rtx memloc = assign_temp (nt, 1, 1, 1);
6894 mark_temp_addr_taken (memloc);
6895 emit_move_insn (memloc, op0);
6899 if (GET_CODE (op0) != MEM)
6902 if (GET_MODE (offset_rtx) != ptr_mode)
6904 #ifdef POINTERS_EXTEND_UNSIGNED
6905 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6907 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6911 /* A constant address in OP0 can have VOIDmode, we must not try
6912 to call force_reg for that case. Avoid that case. */
6913 if (GET_CODE (op0) == MEM
6914 && GET_MODE (op0) == BLKmode
6915 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6917 && (bitpos % bitsize) == 0
6918 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6919 && alignment == GET_MODE_ALIGNMENT (mode1))
6921 rtx temp = change_address (op0, mode1,
6922 plus_constant (XEXP (op0, 0),
6925 if (GET_CODE (XEXP (temp, 0)) == REG)
6928 op0 = change_address (op0, mode1,
6929 force_reg (GET_MODE (XEXP (temp, 0)),
6934 op0 = change_address (op0, VOIDmode,
6935 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6936 force_reg (ptr_mode,
6940 /* Don't forget about volatility even if this is a bitfield. */
6941 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6943 op0 = copy_rtx (op0);
6944 MEM_VOLATILE_P (op0) = 1;
6947 /* Check the access. */
6948 if (cfun != 0 && current_function_check_memory_usage
6949 && GET_CODE (op0) == MEM)
6951 enum memory_use_mode memory_usage;
6952 memory_usage = get_memory_usage_from_modifier (modifier);
6954 if (memory_usage != MEMORY_USE_DONT)
6959 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6960 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6962 /* Check the access right of the pointer. */
6963 in_check_memory_usage = 1;
6964 if (size > BITS_PER_UNIT)
6965 emit_library_call (chkr_check_addr_libfunc,
6966 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6967 Pmode, GEN_INT (size / BITS_PER_UNIT),
6968 TYPE_MODE (sizetype),
6969 GEN_INT (memory_usage),
6970 TYPE_MODE (integer_type_node));
6971 in_check_memory_usage = 0;
6975 /* In cases where an aligned union has an unaligned object
6976 as a field, we might be extracting a BLKmode value from
6977 an integer-mode (e.g., SImode) object. Handle this case
6978 by doing the extract into an object as wide as the field
6979 (which we know to be the width of a basic mode), then
6980 storing into memory, and changing the mode to BLKmode.
6981 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6982 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6983 if (mode1 == VOIDmode
6984 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6985 || (modifier != EXPAND_CONST_ADDRESS
6986 && modifier != EXPAND_INITIALIZER
6987 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6988 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6989 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6990 /* If the field isn't aligned enough to fetch as a memref,
6991 fetch it as a bit field. */
6992 || (mode1 != BLKmode
6993 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6994 && ((TYPE_ALIGN (TREE_TYPE (tem))
6995 < GET_MODE_ALIGNMENT (mode))
6996 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6997 /* If the type and the field are a constant size and the
6998 size of the type isn't the same size as the bitfield,
6999 we must use bitfield operations. */
7001 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7003 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7005 || (modifier != EXPAND_CONST_ADDRESS
7006 && modifier != EXPAND_INITIALIZER
7008 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7009 && (TYPE_ALIGN (type) > alignment
7010 || bitpos % TYPE_ALIGN (type) != 0)))
7012 enum machine_mode ext_mode = mode;
7014 if (ext_mode == BLKmode
7015 && ! (target != 0 && GET_CODE (op0) == MEM
7016 && GET_CODE (target) == MEM
7017 && bitpos % BITS_PER_UNIT == 0))
7018 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7020 if (ext_mode == BLKmode)
7022 /* In this case, BITPOS must start at a byte boundary and
7023 TARGET, if specified, must be a MEM. */
7024 if (GET_CODE (op0) != MEM
7025 || (target != 0 && GET_CODE (target) != MEM)
7026 || bitpos % BITS_PER_UNIT != 0)
7029 op0 = change_address (op0, VOIDmode,
7030 plus_constant (XEXP (op0, 0),
7031 bitpos / BITS_PER_UNIT));
7033 target = assign_temp (type, 0, 1, 1);
7035 emit_block_move (target, op0,
7036 bitsize == -1 ? expr_size (exp)
7037 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7044 op0 = validize_mem (op0);
7046 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7047 mark_reg_pointer (XEXP (op0, 0), alignment);
7049 op0 = extract_bit_field (op0, bitsize, bitpos,
7050 unsignedp, target, ext_mode, ext_mode,
7052 int_size_in_bytes (TREE_TYPE (tem)));
7054 /* If the result is a record type and BITSIZE is narrower than
7055 the mode of OP0, an integral mode, and this is a big endian
7056 machine, we must put the field into the high-order bits. */
7057 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7058 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7059 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7060 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7061 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7065 if (mode == BLKmode)
7067 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7069 rtx new = assign_temp (nt, 0, 1, 1);
7071 emit_move_insn (new, op0);
7072 op0 = copy_rtx (new);
7073 PUT_MODE (op0, BLKmode);
7079 /* If the result is BLKmode, use that to access the object
7081 if (mode == BLKmode)
7084 /* Get a reference to just this component. */
7085 if (modifier == EXPAND_CONST_ADDRESS
7086 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7088 rtx new = gen_rtx_MEM (mode1,
7089 plus_constant (XEXP (op0, 0),
7090 (bitpos / BITS_PER_UNIT)));
7092 MEM_COPY_ATTRIBUTES (new, op0);
7096 op0 = change_address (op0, mode1,
7097 plus_constant (XEXP (op0, 0),
7098 (bitpos / BITS_PER_UNIT)));
7100 set_mem_attributes (op0, exp, 0);
7101 if (GET_CODE (XEXP (op0, 0)) == REG)
7102 mark_reg_pointer (XEXP (op0, 0), alignment);
7104 MEM_VOLATILE_P (op0) |= volatilep;
7105 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7106 || modifier == EXPAND_CONST_ADDRESS
7107 || modifier == EXPAND_INITIALIZER)
7109 else if (target == 0)
7110 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7112 convert_move (target, op0, unsignedp);
7116 /* Intended for a reference to a buffer of a file-object in Pascal.
7117 But it's not certain that a special tree code will really be
7118 necessary for these. INDIRECT_REF might work for them. */
7124 /* Pascal set IN expression.
7127 rlo = set_low - (set_low%bits_per_word);
7128 the_word = set [ (index - rlo)/bits_per_word ];
7129 bit_index = index % bits_per_word;
7130 bitmask = 1 << bit_index;
7131 return !!(the_word & bitmask); */
7133 tree set = TREE_OPERAND (exp, 0);
7134 tree index = TREE_OPERAND (exp, 1);
7135 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7136 tree set_type = TREE_TYPE (set);
7137 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7138 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7139 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7140 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7141 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7142 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7143 rtx setaddr = XEXP (setval, 0);
7144 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7146 rtx diff, quo, rem, addr, bit, result;
7148 /* If domain is empty, answer is no. Likewise if index is constant
7149 and out of bounds. */
7150 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7151 && TREE_CODE (set_low_bound) == INTEGER_CST
7152 && tree_int_cst_lt (set_high_bound, set_low_bound))
7153 || (TREE_CODE (index) == INTEGER_CST
7154 && TREE_CODE (set_low_bound) == INTEGER_CST
7155 && tree_int_cst_lt (index, set_low_bound))
7156 || (TREE_CODE (set_high_bound) == INTEGER_CST
7157 && TREE_CODE (index) == INTEGER_CST
7158 && tree_int_cst_lt (set_high_bound, index))))
7162 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7164 /* If we get here, we have to generate the code for both cases
7165 (in range and out of range). */
7167 op0 = gen_label_rtx ();
7168 op1 = gen_label_rtx ();
7170 if (! (GET_CODE (index_val) == CONST_INT
7171 && GET_CODE (lo_r) == CONST_INT))
7173 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7174 GET_MODE (index_val), iunsignedp, 0, op1);
7177 if (! (GET_CODE (index_val) == CONST_INT
7178 && GET_CODE (hi_r) == CONST_INT))
7180 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7181 GET_MODE (index_val), iunsignedp, 0, op1);
7184 /* Calculate the element number of bit zero in the first word
7186 if (GET_CODE (lo_r) == CONST_INT)
7187 rlow = GEN_INT (INTVAL (lo_r)
7188 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7190 rlow = expand_binop (index_mode, and_optab, lo_r,
7191 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7192 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7194 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7195 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7197 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7198 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7199 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7200 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7202 addr = memory_address (byte_mode,
7203 expand_binop (index_mode, add_optab, diff,
7204 setaddr, NULL_RTX, iunsignedp,
7207 /* Extract the bit we want to examine. */
7208 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7209 gen_rtx_MEM (byte_mode, addr),
7210 make_tree (TREE_TYPE (index), rem),
7212 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7213 GET_MODE (target) == byte_mode ? target : 0,
7214 1, OPTAB_LIB_WIDEN);
7216 if (result != target)
7217 convert_move (target, result, 1);
7219 /* Output the code to handle the out-of-range case. */
7222 emit_move_insn (target, const0_rtx);
7227 case WITH_CLEANUP_EXPR:
7228 if (RTL_EXPR_RTL (exp) == 0)
7231 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7232 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7234 /* That's it for this cleanup. */
7235 TREE_OPERAND (exp, 2) = 0;
7237 return RTL_EXPR_RTL (exp);
7239 case CLEANUP_POINT_EXPR:
7241 /* Start a new binding layer that will keep track of all cleanup
7242 actions to be performed. */
7243 expand_start_bindings (2);
7245 target_temp_slot_level = temp_slot_level;
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7248 /* If we're going to use this value, load it up now. */
7250 op0 = force_not_mem (op0);
7251 preserve_temp_slots (op0);
7252 expand_end_bindings (NULL_TREE, 0, 0);
7257 /* Check for a built-in function. */
7258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7259 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7261 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7263 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7264 == BUILT_IN_FRONTEND)
7265 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7267 return expand_builtin (exp, target, subtarget, tmode, ignore);
7270 return expand_call (exp, target, ignore);
7272 case NON_LVALUE_EXPR:
7275 case REFERENCE_EXPR:
7276 if (TREE_OPERAND (exp, 0) == error_mark_node)
7279 if (TREE_CODE (type) == UNION_TYPE)
7281 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7283 /* If both input and output are BLKmode, this conversion
7284 isn't actually doing anything unless we need to make the
7285 alignment stricter. */
7286 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7287 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7288 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7289 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7293 target = assign_temp (type, 0, 1, 1);
7295 if (GET_CODE (target) == MEM)
7296 /* Store data into beginning of memory target. */
7297 store_expr (TREE_OPERAND (exp, 0),
7298 change_address (target, TYPE_MODE (valtype), 0), 0);
7300 else if (GET_CODE (target) == REG)
7301 /* Store this field into a union of the proper type. */
7302 store_field (target,
7303 MIN ((int_size_in_bytes (TREE_TYPE
7304 (TREE_OPERAND (exp, 0)))
7306 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7307 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7308 VOIDmode, 0, BITS_PER_UNIT,
7309 int_size_in_bytes (type), 0);
7313 /* Return the entire union. */
7317 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7319 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7322 /* If the signedness of the conversion differs and OP0 is
7323 a promoted SUBREG, clear that indication since we now
7324 have to do the proper extension. */
7325 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7326 && GET_CODE (op0) == SUBREG)
7327 SUBREG_PROMOTED_VAR_P (op0) = 0;
7332 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7333 if (GET_MODE (op0) == mode)
7336 /* If OP0 is a constant, just convert it into the proper mode. */
7337 if (CONSTANT_P (op0))
7339 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7340 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7342 if (modifier == EXPAND_INITIALIZER)
7343 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7347 convert_to_mode (mode, op0,
7348 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7350 convert_move (target, op0,
7351 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7355 /* We come here from MINUS_EXPR when the second operand is a
7358 this_optab = ! unsignedp && flag_trapv
7359 && (GET_MODE_CLASS(mode) == MODE_INT)
7360 ? addv_optab : add_optab;
7362 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7363 something else, make sure we add the register to the constant and
7364 then to the other thing. This case can occur during strength
7365 reduction and doing it this way will produce better code if the
7366 frame pointer or argument pointer is eliminated.
7368 fold-const.c will ensure that the constant is always in the inner
7369 PLUS_EXPR, so the only case we need to do anything about is if
7370 sp, ap, or fp is our second argument, in which case we must swap
7371 the innermost first argument and our second argument. */
7373 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7374 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7375 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7376 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7377 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7378 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7380 tree t = TREE_OPERAND (exp, 1);
7382 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7383 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7386 /* If the result is to be ptr_mode and we are adding an integer to
7387 something, we might be forming a constant. So try to use
7388 plus_constant. If it produces a sum and we can't accept it,
7389 use force_operand. This allows P = &ARR[const] to generate
7390 efficient code on machines where a SYMBOL_REF is not a valid
7393 If this is an EXPAND_SUM call, always return the sum. */
7394 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7395 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7399 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7403 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7405 /* Use immed_double_const to ensure that the constant is
7406 truncated according to the mode of OP1, then sign extended
7407 to a HOST_WIDE_INT. Using the constant directly can result
7408 in non-canonical RTL in a 64x32 cross compile. */
7410 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7412 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7413 op1 = plus_constant (op1, INTVAL (constant_part));
7414 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7415 op1 = force_operand (op1, target);
7419 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7420 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7421 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7427 if (! CONSTANT_P (op0))
7429 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7430 VOIDmode, modifier);
7431 /* Don't go to both_summands if modifier
7432 says it's not right to return a PLUS. */
7433 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7437 /* Use immed_double_const to ensure that the constant is
7438 truncated according to the mode of OP1, then sign extended
7439 to a HOST_WIDE_INT. Using the constant directly can result
7440 in non-canonical RTL in a 64x32 cross compile. */
7442 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7444 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7445 op0 = plus_constant (op0, INTVAL (constant_part));
7446 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7447 op0 = force_operand (op0, target);
7452 /* No sense saving up arithmetic to be done
7453 if it's all in the wrong mode to form part of an address.
7454 And force_operand won't know whether to sign-extend or
7456 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7457 || mode != ptr_mode)
7460 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7467 /* Make sure any term that's a sum with a constant comes last. */
7468 if (GET_CODE (op0) == PLUS
7469 && CONSTANT_P (XEXP (op0, 1)))
7475 /* If adding to a sum including a constant,
7476 associate it to put the constant outside. */
7477 if (GET_CODE (op1) == PLUS
7478 && CONSTANT_P (XEXP (op1, 1)))
7480 rtx constant_term = const0_rtx;
7482 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7485 /* Ensure that MULT comes first if there is one. */
7486 else if (GET_CODE (op0) == MULT)
7487 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7489 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7491 /* Let's also eliminate constants from op0 if possible. */
7492 op0 = eliminate_constant_term (op0, &constant_term);
7494 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7495 their sum should be a constant. Form it into OP1, since the
7496 result we want will then be OP0 + OP1. */
7498 temp = simplify_binary_operation (PLUS, mode, constant_term,
7503 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7506 /* Put a constant term last and put a multiplication first. */
7507 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7508 temp = op1, op1 = op0, op0 = temp;
7510 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7511 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7514 /* For initializers, we are allowed to return a MINUS of two
7515 symbolic constants. Here we handle all cases when both operands
7517 /* Handle difference of two symbolic constants,
7518 for the sake of an initializer. */
7519 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7520 && really_constant_p (TREE_OPERAND (exp, 0))
7521 && really_constant_p (TREE_OPERAND (exp, 1)))
7523 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7524 VOIDmode, ro_modifier);
7525 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7526 VOIDmode, ro_modifier);
7528 /* If the last operand is a CONST_INT, use plus_constant of
7529 the negated constant. Else make the MINUS. */
7530 if (GET_CODE (op1) == CONST_INT)
7531 return plus_constant (op0, - INTVAL (op1));
7533 return gen_rtx_MINUS (mode, op0, op1);
7535 /* Convert A - const to A + (-const). */
7536 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7538 tree negated = fold (build1 (NEGATE_EXPR, type,
7539 TREE_OPERAND (exp, 1)));
7541 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7542 /* If we can't negate the constant in TYPE, leave it alone and
7543 expand_binop will negate it for us. We used to try to do it
7544 here in the signed version of TYPE, but that doesn't work
7545 on POINTER_TYPEs. */;
7548 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7552 this_optab = ! unsignedp && flag_trapv
7553 && (GET_MODE_CLASS(mode) == MODE_INT)
7554 ? subv_optab : sub_optab;
7558 /* If first operand is constant, swap them.
7559 Thus the following special case checks need only
7560 check the second operand. */
7561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7563 register tree t1 = TREE_OPERAND (exp, 0);
7564 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7565 TREE_OPERAND (exp, 1) = t1;
7568 /* Attempt to return something suitable for generating an
7569 indexed address, for machines that support that. */
7571 if (modifier == EXPAND_SUM && mode == ptr_mode
7572 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7573 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7575 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7578 /* Apply distributive law if OP0 is x+c. */
7579 if (GET_CODE (op0) == PLUS
7580 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7585 (mode, XEXP (op0, 0),
7586 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7587 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7588 * INTVAL (XEXP (op0, 1))));
7590 if (GET_CODE (op0) != REG)
7591 op0 = force_operand (op0, NULL_RTX);
7592 if (GET_CODE (op0) != REG)
7593 op0 = copy_to_mode_reg (mode, op0);
7596 gen_rtx_MULT (mode, op0,
7597 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7600 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7603 /* Check for multiplying things that have been extended
7604 from a narrower type. If this machine supports multiplying
7605 in that narrower type with a result in the desired type,
7606 do it that way, and avoid the explicit type-conversion. */
7607 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7608 && TREE_CODE (type) == INTEGER_TYPE
7609 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7610 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7611 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7612 && int_fits_type_p (TREE_OPERAND (exp, 1),
7613 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7614 /* Don't use a widening multiply if a shift will do. */
7615 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7616 > HOST_BITS_PER_WIDE_INT)
7617 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7619 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7622 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7623 /* If both operands are extended, they must either both
7624 be zero-extended or both be sign-extended. */
7625 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7627 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7629 enum machine_mode innermode
7630 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7631 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7632 ? smul_widen_optab : umul_widen_optab);
7633 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7634 ? umul_widen_optab : smul_widen_optab);
7635 if (mode == GET_MODE_WIDER_MODE (innermode))
7637 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7639 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7640 NULL_RTX, VOIDmode, 0);
7641 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7642 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7645 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7646 NULL_RTX, VOIDmode, 0);
7649 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7650 && innermode == word_mode)
7653 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7654 NULL_RTX, VOIDmode, 0);
7655 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7656 op1 = convert_modes (innermode, mode,
7657 expand_expr (TREE_OPERAND (exp, 1),
7658 NULL_RTX, VOIDmode, 0),
7661 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7662 NULL_RTX, VOIDmode, 0);
7663 temp = expand_binop (mode, other_optab, op0, op1, target,
7664 unsignedp, OPTAB_LIB_WIDEN);
7665 htem = expand_mult_highpart_adjust (innermode,
7666 gen_highpart (innermode, temp),
7668 gen_highpart (innermode, temp),
7670 emit_move_insn (gen_highpart (innermode, temp), htem);
7675 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7676 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7677 return expand_mult (mode, op0, op1, target, unsignedp);
7679 case TRUNC_DIV_EXPR:
7680 case FLOOR_DIV_EXPR:
7682 case ROUND_DIV_EXPR:
7683 case EXACT_DIV_EXPR:
7684 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7686 /* Possible optimization: compute the dividend with EXPAND_SUM
7687 then if the divisor is constant can optimize the case
7688 where some terms of the dividend have coeffs divisible by it. */
7689 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7690 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7691 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7694 this_optab = flodiv_optab;
7697 case TRUNC_MOD_EXPR:
7698 case FLOOR_MOD_EXPR:
7700 case ROUND_MOD_EXPR:
7701 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7703 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7704 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7705 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7707 case FIX_ROUND_EXPR:
7708 case FIX_FLOOR_EXPR:
7710 abort (); /* Not used for C. */
7712 case FIX_TRUNC_EXPR:
7713 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7715 target = gen_reg_rtx (mode);
7716 expand_fix (target, op0, unsignedp);
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7722 target = gen_reg_rtx (mode);
7723 /* expand_float can't figure out what to do if FROM has VOIDmode.
7724 So give it the correct mode. With -O, cse will optimize this. */
7725 if (GET_MODE (op0) == VOIDmode)
7726 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7728 expand_float (target, op0,
7729 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7733 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7734 temp = expand_unop (mode,
7735 ! unsignedp && flag_trapv
7736 && (GET_MODE_CLASS(mode) == MODE_INT)
7737 ? negv_optab : neg_optab, op0, target, 0);
7743 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7745 /* Handle complex values specially. */
7746 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7747 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7748 return expand_complex_abs (mode, op0, target, unsignedp);
7750 /* Unsigned abs is simply the operand. Testing here means we don't
7751 risk generating incorrect code below. */
7752 if (TREE_UNSIGNED (type))
7755 return expand_abs (mode, op0, target, unsignedp,
7756 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7760 target = original_target;
7761 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7762 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7763 || GET_MODE (target) != mode
7764 || (GET_CODE (target) == REG
7765 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7766 target = gen_reg_rtx (mode);
7767 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7768 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7770 /* First try to do it with a special MIN or MAX instruction.
7771 If that does not win, use a conditional jump to select the proper
7773 this_optab = (TREE_UNSIGNED (type)
7774 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7775 : (code == MIN_EXPR ? smin_optab : smax_optab));
7777 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7782 /* At this point, a MEM target is no longer useful; we will get better
7785 if (GET_CODE (target) == MEM)
7786 target = gen_reg_rtx (mode);
7789 emit_move_insn (target, op0);
7791 op0 = gen_label_rtx ();
7793 /* If this mode is an integer too wide to compare properly,
7794 compare word by word. Rely on cse to optimize constant cases. */
7795 if (GET_MODE_CLASS (mode) == MODE_INT
7796 && ! can_compare_p (GE, mode, ccp_jump))
7798 if (code == MAX_EXPR)
7799 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7800 target, op1, NULL_RTX, op0);
7802 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7803 op1, target, NULL_RTX, op0);
7807 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7808 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7809 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7812 emit_move_insn (target, op1);
7817 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7818 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7824 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7825 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7830 /* ??? Can optimize bitwise operations with one arg constant.
7831 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7832 and (a bitwise1 b) bitwise2 b (etc)
7833 but that is probably not worth while. */
7835 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7836 boolean values when we want in all cases to compute both of them. In
7837 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7838 as actual zero-or-1 values and then bitwise anding. In cases where
7839 there cannot be any side effects, better code would be made by
7840 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7841 how to recognize those cases. */
7843 case TRUTH_AND_EXPR:
7845 this_optab = and_optab;
7850 this_optab = ior_optab;
7853 case TRUTH_XOR_EXPR:
7855 this_optab = xor_optab;
7862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7865 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7868 /* Could determine the answer when only additive constants differ. Also,
7869 the addition of one can be handled by changing the condition. */
7876 case UNORDERED_EXPR:
7883 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7887 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7888 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7890 && GET_CODE (original_target) == REG
7891 && (GET_MODE (original_target)
7892 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7894 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7897 if (temp != original_target)
7898 temp = copy_to_reg (temp);
7900 op1 = gen_label_rtx ();
7901 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7902 GET_MODE (temp), unsignedp, 0, op1);
7903 emit_move_insn (temp, const1_rtx);
7908 /* If no set-flag instruction, must generate a conditional
7909 store into a temporary variable. Drop through
7910 and handle this like && and ||. */
7912 case TRUTH_ANDIF_EXPR:
7913 case TRUTH_ORIF_EXPR:
7915 && (target == 0 || ! safe_from_p (target, exp, 1)
7916 /* Make sure we don't have a hard reg (such as function's return
7917 value) live across basic blocks, if not optimizing. */
7918 || (!optimize && GET_CODE (target) == REG
7919 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7920 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7923 emit_clr_insn (target);
7925 op1 = gen_label_rtx ();
7926 jumpifnot (exp, op1);
7929 emit_0_to_1_insn (target);
7932 return ignore ? const0_rtx : target;
7934 case TRUTH_NOT_EXPR:
7935 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7936 /* The parser is careful to generate TRUTH_NOT_EXPR
7937 only with operands that are always zero or one. */
7938 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7939 target, 1, OPTAB_LIB_WIDEN);
7945 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7947 return expand_expr (TREE_OPERAND (exp, 1),
7948 (ignore ? const0_rtx : target),
7952 /* If we would have a "singleton" (see below) were it not for a
7953 conversion in each arm, bring that conversion back out. */
7954 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7955 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7956 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7957 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7959 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7960 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7962 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7963 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7964 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7965 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7966 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7967 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7968 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7969 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7970 return expand_expr (build1 (NOP_EXPR, type,
7971 build (COND_EXPR, TREE_TYPE (true),
7972 TREE_OPERAND (exp, 0),
7974 target, tmode, modifier);
7978 /* Note that COND_EXPRs whose type is a structure or union
7979 are required to be constructed to contain assignments of
7980 a temporary variable, so that we can evaluate them here
7981 for side effect only. If type is void, we must do likewise. */
7983 /* If an arm of the branch requires a cleanup,
7984 only that cleanup is performed. */
7987 tree binary_op = 0, unary_op = 0;
7989 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7990 convert it to our mode, if necessary. */
7991 if (integer_onep (TREE_OPERAND (exp, 1))
7992 && integer_zerop (TREE_OPERAND (exp, 2))
7993 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7997 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8003 if (GET_MODE (op0) == mode)
8007 target = gen_reg_rtx (mode);
8008 convert_move (target, op0, unsignedp);
8012 /* Check for X ? A + B : A. If we have this, we can copy A to the
8013 output and conditionally add B. Similarly for unary operations.
8014 Don't do this if X has side-effects because those side effects
8015 might affect A or B and the "?" operation is a sequence point in
8016 ANSI. (operand_equal_p tests for side effects.) */
8018 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8019 && operand_equal_p (TREE_OPERAND (exp, 2),
8020 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8021 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8022 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8023 && operand_equal_p (TREE_OPERAND (exp, 1),
8024 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8025 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8026 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8027 && operand_equal_p (TREE_OPERAND (exp, 2),
8028 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8029 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8030 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8031 && operand_equal_p (TREE_OPERAND (exp, 1),
8032 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8033 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8035 /* If we are not to produce a result, we have no target. Otherwise,
8036 if a target was specified use it; it will not be used as an
8037 intermediate target unless it is safe. If no target, use a
8042 else if (original_target
8043 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8044 || (singleton && GET_CODE (original_target) == REG
8045 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8046 && original_target == var_rtx (singleton)))
8047 && GET_MODE (original_target) == mode
8048 #ifdef HAVE_conditional_move
8049 && (! can_conditionally_move_p (mode)
8050 || GET_CODE (original_target) == REG
8051 || TREE_ADDRESSABLE (type))
8053 && ! (GET_CODE (original_target) == MEM
8054 && MEM_VOLATILE_P (original_target)))
8055 temp = original_target;
8056 else if (TREE_ADDRESSABLE (type))
8059 temp = assign_temp (type, 0, 0, 1);
8061 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8062 do the test of X as a store-flag operation, do this as
8063 A + ((X != 0) << log C). Similarly for other simple binary
8064 operators. Only do for C == 1 if BRANCH_COST is low. */
8065 if (temp && singleton && binary_op
8066 && (TREE_CODE (binary_op) == PLUS_EXPR
8067 || TREE_CODE (binary_op) == MINUS_EXPR
8068 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8069 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8070 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8071 : integer_onep (TREE_OPERAND (binary_op, 1)))
8072 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8075 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8076 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8077 ? addv_optab : add_optab)
8078 : TREE_CODE (binary_op) == MINUS_EXPR
8079 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8080 ? subv_optab : sub_optab)
8081 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8084 /* If we had X ? A : A + 1, do this as A + (X == 0).
8086 We have to invert the truth value here and then put it
8087 back later if do_store_flag fails. We cannot simply copy
8088 TREE_OPERAND (exp, 0) to another variable and modify that
8089 because invert_truthvalue can modify the tree pointed to
8091 if (singleton == TREE_OPERAND (exp, 1))
8092 TREE_OPERAND (exp, 0)
8093 = invert_truthvalue (TREE_OPERAND (exp, 0));
8095 result = do_store_flag (TREE_OPERAND (exp, 0),
8096 (safe_from_p (temp, singleton, 1)
8098 mode, BRANCH_COST <= 1);
8100 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8101 result = expand_shift (LSHIFT_EXPR, mode, result,
8102 build_int_2 (tree_log2
8106 (safe_from_p (temp, singleton, 1)
8107 ? temp : NULL_RTX), 0);
8111 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8112 return expand_binop (mode, boptab, op1, result, temp,
8113 unsignedp, OPTAB_LIB_WIDEN);
8115 else if (singleton == TREE_OPERAND (exp, 1))
8116 TREE_OPERAND (exp, 0)
8117 = invert_truthvalue (TREE_OPERAND (exp, 0));
8120 do_pending_stack_adjust ();
8122 op0 = gen_label_rtx ();
8124 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8128 /* If the target conflicts with the other operand of the
8129 binary op, we can't use it. Also, we can't use the target
8130 if it is a hard register, because evaluating the condition
8131 might clobber it. */
8133 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8134 || (GET_CODE (temp) == REG
8135 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8136 temp = gen_reg_rtx (mode);
8137 store_expr (singleton, temp, 0);
8140 expand_expr (singleton,
8141 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8142 if (singleton == TREE_OPERAND (exp, 1))
8143 jumpif (TREE_OPERAND (exp, 0), op0);
8145 jumpifnot (TREE_OPERAND (exp, 0), op0);
8147 start_cleanup_deferral ();
8148 if (binary_op && temp == 0)
8149 /* Just touch the other operand. */
8150 expand_expr (TREE_OPERAND (binary_op, 1),
8151 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8153 store_expr (build (TREE_CODE (binary_op), type,
8154 make_tree (type, temp),
8155 TREE_OPERAND (binary_op, 1)),
8158 store_expr (build1 (TREE_CODE (unary_op), type,
8159 make_tree (type, temp)),
8163 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8164 comparison operator. If we have one of these cases, set the
8165 output to A, branch on A (cse will merge these two references),
8166 then set the output to FOO. */
8168 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8169 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8170 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8171 TREE_OPERAND (exp, 1), 0)
8172 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8173 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8174 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8176 if (GET_CODE (temp) == REG
8177 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8178 temp = gen_reg_rtx (mode);
8179 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8180 jumpif (TREE_OPERAND (exp, 0), op0);
8182 start_cleanup_deferral ();
8183 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8187 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8188 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8189 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8190 TREE_OPERAND (exp, 2), 0)
8191 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8192 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8193 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8195 if (GET_CODE (temp) == REG
8196 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8197 temp = gen_reg_rtx (mode);
8198 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8199 jumpifnot (TREE_OPERAND (exp, 0), op0);
8201 start_cleanup_deferral ();
8202 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8207 op1 = gen_label_rtx ();
8208 jumpifnot (TREE_OPERAND (exp, 0), op0);
8210 start_cleanup_deferral ();
8212 /* One branch of the cond can be void, if it never returns. For
8213 example A ? throw : E */
8215 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8216 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8218 expand_expr (TREE_OPERAND (exp, 1),
8219 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8220 end_cleanup_deferral ();
8222 emit_jump_insn (gen_jump (op1));
8225 start_cleanup_deferral ();
8227 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8228 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8230 expand_expr (TREE_OPERAND (exp, 2),
8231 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8234 end_cleanup_deferral ();
8245 /* Something needs to be initialized, but we didn't know
8246 where that thing was when building the tree. For example,
8247 it could be the return value of a function, or a parameter
8248 to a function which lays down in the stack, or a temporary
8249 variable which must be passed by reference.
8251 We guarantee that the expression will either be constructed
8252 or copied into our original target. */
8254 tree slot = TREE_OPERAND (exp, 0);
8255 tree cleanups = NULL_TREE;
8258 if (TREE_CODE (slot) != VAR_DECL)
8262 target = original_target;
8264 /* Set this here so that if we get a target that refers to a
8265 register variable that's already been used, put_reg_into_stack
8266 knows that it should fix up those uses. */
8267 TREE_USED (slot) = 1;
8271 if (DECL_RTL (slot) != 0)
8273 target = DECL_RTL (slot);
8274 /* If we have already expanded the slot, so don't do
8276 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8281 target = assign_temp (type, 2, 0, 1);
8282 /* All temp slots at this level must not conflict. */
8283 preserve_temp_slots (target);
8284 DECL_RTL (slot) = target;
8285 if (TREE_ADDRESSABLE (slot))
8286 put_var_into_stack (slot);
8288 /* Since SLOT is not known to the called function
8289 to belong to its stack frame, we must build an explicit
8290 cleanup. This case occurs when we must build up a reference
8291 to pass the reference as an argument. In this case,
8292 it is very likely that such a reference need not be
8295 if (TREE_OPERAND (exp, 2) == 0)
8296 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8297 cleanups = TREE_OPERAND (exp, 2);
8302 /* This case does occur, when expanding a parameter which
8303 needs to be constructed on the stack. The target
8304 is the actual stack address that we want to initialize.
8305 The function we call will perform the cleanup in this case. */
8307 /* If we have already assigned it space, use that space,
8308 not target that we were passed in, as our target
8309 parameter is only a hint. */
8310 if (DECL_RTL (slot) != 0)
8312 target = DECL_RTL (slot);
8313 /* If we have already expanded the slot, so don't do
8315 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8320 DECL_RTL (slot) = target;
8321 /* If we must have an addressable slot, then make sure that
8322 the RTL that we just stored in slot is OK. */
8323 if (TREE_ADDRESSABLE (slot))
8324 put_var_into_stack (slot);
8328 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8329 /* Mark it as expanded. */
8330 TREE_OPERAND (exp, 1) = NULL_TREE;
8332 store_expr (exp1, target, 0);
8334 expand_decl_cleanup (NULL_TREE, cleanups);
8341 tree lhs = TREE_OPERAND (exp, 0);
8342 tree rhs = TREE_OPERAND (exp, 1);
8343 tree noncopied_parts = 0;
8344 tree lhs_type = TREE_TYPE (lhs);
8346 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8347 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8348 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8349 TYPE_NONCOPIED_PARTS (lhs_type));
8350 while (noncopied_parts != 0)
8352 expand_assignment (TREE_VALUE (noncopied_parts),
8353 TREE_PURPOSE (noncopied_parts), 0, 0);
8354 noncopied_parts = TREE_CHAIN (noncopied_parts);
8361 /* If lhs is complex, expand calls in rhs before computing it.
8362 That's so we don't compute a pointer and save it over a call.
8363 If lhs is simple, compute it first so we can give it as a
8364 target if the rhs is just a call. This avoids an extra temp and copy
8365 and that prevents a partial-subsumption which makes bad code.
8366 Actually we could treat component_ref's of vars like vars. */
8368 tree lhs = TREE_OPERAND (exp, 0);
8369 tree rhs = TREE_OPERAND (exp, 1);
8370 tree noncopied_parts = 0;
8371 tree lhs_type = TREE_TYPE (lhs);
8375 if (TREE_CODE (lhs) != VAR_DECL
8376 && TREE_CODE (lhs) != RESULT_DECL
8377 && TREE_CODE (lhs) != PARM_DECL
8378 && ! (TREE_CODE (lhs) == INDIRECT_REF
8379 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8381 /* Check for |= or &= of a bitfield of size one into another bitfield
8382 of size 1. In this case, (unless we need the result of the
8383 assignment) we can do this more efficiently with a
8384 test followed by an assignment, if necessary.
8386 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8387 things change so we do, this code should be enhanced to
8390 && TREE_CODE (lhs) == COMPONENT_REF
8391 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8392 || TREE_CODE (rhs) == BIT_AND_EXPR)
8393 && TREE_OPERAND (rhs, 0) == lhs
8394 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8395 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8396 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8398 rtx label = gen_label_rtx ();
8400 do_jump (TREE_OPERAND (rhs, 1),
8401 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8402 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8403 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8404 (TREE_CODE (rhs) == BIT_IOR_EXPR
8406 : integer_zero_node)),
8408 do_pending_stack_adjust ();
8413 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8414 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8415 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8416 TYPE_NONCOPIED_PARTS (lhs_type));
8418 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8419 while (noncopied_parts != 0)
8421 expand_assignment (TREE_PURPOSE (noncopied_parts),
8422 TREE_VALUE (noncopied_parts), 0, 0);
8423 noncopied_parts = TREE_CHAIN (noncopied_parts);
8429 if (!TREE_OPERAND (exp, 0))
8430 expand_null_return ();
8432 expand_return (TREE_OPERAND (exp, 0));
8435 case PREINCREMENT_EXPR:
8436 case PREDECREMENT_EXPR:
8437 return expand_increment (exp, 0, ignore);
8439 case POSTINCREMENT_EXPR:
8440 case POSTDECREMENT_EXPR:
8441 /* Faster to treat as pre-increment if result is not used. */
8442 return expand_increment (exp, ! ignore, ignore);
8445 /* If nonzero, TEMP will be set to the address of something that might
8446 be a MEM corresponding to a stack slot. */
8449 /* Are we taking the address of a nested function? */
8450 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8451 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8452 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8453 && ! TREE_STATIC (exp))
8455 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8456 op0 = force_operand (op0, target);
8458 /* If we are taking the address of something erroneous, just
8460 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8464 /* We make sure to pass const0_rtx down if we came in with
8465 ignore set, to avoid doing the cleanups twice for something. */
8466 op0 = expand_expr (TREE_OPERAND (exp, 0),
8467 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8468 (modifier == EXPAND_INITIALIZER
8469 ? modifier : EXPAND_CONST_ADDRESS));
8471 /* If we are going to ignore the result, OP0 will have been set
8472 to const0_rtx, so just return it. Don't get confused and
8473 think we are taking the address of the constant. */
8477 op0 = protect_from_queue (op0, 0);
8479 /* We would like the object in memory. If it is a constant, we can
8480 have it be statically allocated into memory. For a non-constant,
8481 we need to allocate some memory and store the value into it. */
8483 if (CONSTANT_P (op0))
8484 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8486 else if (GET_CODE (op0) == MEM)
8488 mark_temp_addr_taken (op0);
8489 temp = XEXP (op0, 0);
8492 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8493 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8494 || GET_CODE (op0) == PARALLEL)
8496 /* If this object is in a register, it must be not
8498 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8499 tree nt = build_qualified_type (inner_type,
8500 (TYPE_QUALS (inner_type)
8501 | TYPE_QUAL_CONST));
8502 rtx memloc = assign_temp (nt, 1, 1, 1);
8504 mark_temp_addr_taken (memloc);
8505 if (GET_CODE (op0) == PARALLEL)
8506 /* Handle calls that pass values in multiple non-contiguous
8507 locations. The Irix 6 ABI has examples of this. */
8508 emit_group_store (memloc, op0,
8509 int_size_in_bytes (inner_type),
8510 TYPE_ALIGN (inner_type));
8512 emit_move_insn (memloc, op0);
8516 if (GET_CODE (op0) != MEM)
8519 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8521 temp = XEXP (op0, 0);
8522 #ifdef POINTERS_EXTEND_UNSIGNED
8523 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8524 && mode == ptr_mode)
8525 temp = convert_memory_address (ptr_mode, temp);
8530 op0 = force_operand (XEXP (op0, 0), target);
8533 if (flag_force_addr && GET_CODE (op0) != REG)
8534 op0 = force_reg (Pmode, op0);
8536 if (GET_CODE (op0) == REG
8537 && ! REG_USERVAR_P (op0))
8538 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8540 /* If we might have had a temp slot, add an equivalent address
8543 update_temp_slot_address (temp, op0);
8545 #ifdef POINTERS_EXTEND_UNSIGNED
8546 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8547 && mode == ptr_mode)
8548 op0 = convert_memory_address (ptr_mode, op0);
8553 case ENTRY_VALUE_EXPR:
8556 /* COMPLEX type for Extended Pascal & Fortran */
8559 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8562 /* Get the rtx code of the operands. */
8563 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8564 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8567 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8571 /* Move the real (op0) and imaginary (op1) parts to their location. */
8572 emit_move_insn (gen_realpart (mode, target), op0);
8573 emit_move_insn (gen_imagpart (mode, target), op1);
8575 insns = get_insns ();
8578 /* Complex construction should appear as a single unit. */
8579 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8580 each with a separate pseudo as destination.
8581 It's not correct for flow to treat them as a unit. */
8582 if (GET_CODE (target) != CONCAT)
8583 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8591 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8592 return gen_realpart (mode, op0);
8595 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8596 return gen_imagpart (mode, op0);
8600 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8604 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8607 target = gen_reg_rtx (mode);
8611 /* Store the realpart and the negated imagpart to target. */
8612 emit_move_insn (gen_realpart (partmode, target),
8613 gen_realpart (partmode, op0));
8615 imag_t = gen_imagpart (partmode, target);
8616 temp = expand_unop (partmode,
8617 ! unsignedp && flag_trapv
8618 && (GET_MODE_CLASS(partmode) == MODE_INT)
8619 ? negv_optab : neg_optab,
8620 gen_imagpart (partmode, op0), imag_t, 0);
8622 emit_move_insn (imag_t, temp);
8624 insns = get_insns ();
8627 /* Conjugate should appear as a single unit
8628 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8629 each with a separate pseudo as destination.
8630 It's not correct for flow to treat them as a unit. */
8631 if (GET_CODE (target) != CONCAT)
8632 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8639 case TRY_CATCH_EXPR:
8641 tree handler = TREE_OPERAND (exp, 1);
8643 expand_eh_region_start ();
8645 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8647 expand_eh_region_end (handler);
8652 case TRY_FINALLY_EXPR:
8654 tree try_block = TREE_OPERAND (exp, 0);
8655 tree finally_block = TREE_OPERAND (exp, 1);
8656 rtx finally_label = gen_label_rtx ();
8657 rtx done_label = gen_label_rtx ();
8658 rtx return_link = gen_reg_rtx (Pmode);
8659 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8660 (tree) finally_label, (tree) return_link);
8661 TREE_SIDE_EFFECTS (cleanup) = 1;
8663 /* Start a new binding layer that will keep track of all cleanup
8664 actions to be performed. */
8665 expand_start_bindings (2);
8667 target_temp_slot_level = temp_slot_level;
8669 expand_decl_cleanup (NULL_TREE, cleanup);
8670 op0 = expand_expr (try_block, target, tmode, modifier);
8672 preserve_temp_slots (op0);
8673 expand_end_bindings (NULL_TREE, 0, 0);
8674 emit_jump (done_label);
8675 emit_label (finally_label);
8676 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8677 emit_indirect_jump (return_link);
8678 emit_label (done_label);
8682 case GOTO_SUBROUTINE_EXPR:
8684 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8685 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8686 rtx return_address = gen_label_rtx ();
8687 emit_move_insn (return_link,
8688 gen_rtx_LABEL_REF (Pmode, return_address));
8690 emit_label (return_address);
8696 rtx dcc = get_dynamic_cleanup_chain ();
8697 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8703 rtx dhc = get_dynamic_handler_chain ();
8704 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8709 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8712 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8715 /* Here to do an ordinary binary operator, generating an instruction
8716 from the optab already placed in `this_optab'. */
8718 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8723 temp = expand_binop (mode, this_optab, op0, op1, target,
8724 unsignedp, OPTAB_LIB_WIDEN);
8730 /* Similar to expand_expr, except that we don't specify a target, target
8731 mode, or modifier and we return the alignment of the inner type. This is
8732 used in cases where it is not necessary to align the result to the
8733 alignment of its type as long as we know the alignment of the result, for
8734 example for comparisons of BLKmode values. */
/* NOTE(review): K&R-style definition.  EXP is the tree expression to
   expand; PALIGN receives the alignment (in bits) of the returned value.
   Returns the RTX for EXP, as expand_expr does.  */
8737 expand_expr_unaligned (exp, palign)
8739 unsigned int *palign;
8742 tree type = TREE_TYPE (exp);
8743 register enum machine_mode mode = TYPE_MODE (type);
8745 /* Default the alignment we return to that of the type. */
8746 *palign = TYPE_ALIGN (type);
8748 /* The only cases in which we do anything special is if the resulting mode
8750 if (mode != BLKmode)
8751 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8753 switch (TREE_CODE (exp))
8757 case NON_LVALUE_EXPR:
8758 /* Conversions between BLKmode values don't change the underlying
8759 alignment or value. */
8760 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8761 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
/* ARRAY_REF handling (the case label itself is elided in this view).  */
8765 /* Much of the code for this case is copied directly from expand_expr.
8766 We need to duplicate it here because we will do something different
8767 in the fall-through case, so we need to handle the same exceptions
8770 tree array = TREE_OPERAND (exp, 0);
8771 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8772 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8773 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8776 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8779 /* Optimize the special-case of a zero lower bound.
8781 We convert the low_bound to sizetype to avoid some problems
8782 with constant folding. (E.g. suppose the lower bound is 1,
8783 and its mode is QI. Without the conversion, (ARRAY
8784 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8785 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8787 if (! integer_zerop (low_bound))
8788 index = size_diffop (index, convert (sizetype, low_bound))
8790 /* If this is a constant index into a constant array,
8791 just get the value from the array. Handle both the cases when
8792 we have an explicit constructor and when our operand is a variable
8793 that was declared const. */
8795 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8796 && host_integerp (index, 0)
8797 && 0 > compare_tree_int (index,
8798 list_length (CONSTRUCTOR_ELTS
8799 (TREE_OPERAND (exp, 0)))))
8803 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8804 i = tree_low_cst (index, 0);
8805 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8809 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8812 else if (optimize >= 1
8813 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8814 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8815 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8817 if (TREE_CODE (index) == INTEGER_CST)
8819 tree init = DECL_INITIAL (array);
8821 if (TREE_CODE (init) == CONSTRUCTOR)
8825 for (elem = CONSTRUCTOR_ELTS (init);
8826 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8827 elem = TREE_CHAIN (elem))
8831 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8841 /* If the operand is a CONSTRUCTOR, we can just extract the
8842 appropriate field if it is present. Don't do this if we have
8843 already written the data since we want to refer to that copy
8844 and varasm.c assumes that's what we'll do. */
8845 if (TREE_CODE (exp) != ARRAY_REF
8846 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8847 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8851 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8852 elt = TREE_CHAIN (elt))
8853 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8854 /* Note that unlike the case in expand_expr, we know this is
8855 BLKmode and hence not an integer. */
8856 return expand_expr_unaligned (TREE_VALUE (elt), palign);
/* Fall-through for component/bit-field references: extract the
   referenced piece of the object and compute the alignment it
   actually has, storing it through PALIGN below.  */
8860 enum machine_mode mode1;
8861 HOST_WIDE_INT bitsize, bitpos;
8864 unsigned int alignment;
8866 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8867 &mode1, &unsignedp, &volatilep,
8870 /* If we got back the original object, something is wrong. Perhaps
8871 we are evaluating an expression too early. In any event, don't
8872 infinitely recurse. */
8876 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8878 /* If this is a constant, put it into a register if it is a
8879 legitimate constant and OFFSET is 0 and memory if it isn't. */
8880 if (CONSTANT_P (op0))
8882 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8884 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8886 op0 = force_reg (inner_mode, op0);
8888 op0 = validize_mem (force_const_mem (inner_mode, op0));
8893 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8895 /* If this object is in a register, put it into memory.
8896 This case can't occur in C, but can in Ada if we have
8897 unchecked conversion of an expression from a scalar type to
8898 an array or record type. */
8899 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8900 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8902 tree nt = build_qualified_type (TREE_TYPE (tem),
8903 (TYPE_QUALS (TREE_TYPE (tem))
8904 | TYPE_QUAL_CONST))
8905 rtx memloc = assign_temp (nt, 1, 1, 1);
8907 mark_temp_addr_taken (memloc);
8908 emit_move_insn (memloc, op0);
8912 if (GET_CODE (op0) != MEM)
8915 if (GET_MODE (offset_rtx) != ptr_mode)
8917 #ifdef POINTERS_EXTEND_UNSIGNED
8918 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8924 op0 = change_address (op0, VOIDmode,
8925 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8926 force_reg (ptr_mode,
8930 /* Don't forget about volatility even if this is a bitfield. */
8931 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8933 op0 = copy_rtx (op0);
8934 MEM_VOLATILE_P (op0) = 1;
8937 /* Check the access. */
8938 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8943 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8944 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8946 /* Check the access right of the pointer. */
8947 in_check_memory_usage = 1;
8948 if (size > BITS_PER_UNIT)
8949 emit_library_call (chkr_check_addr_libfunc,
8950 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8951 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8952 TYPE_MODE (sizetype),
8953 GEN_INT (MEMORY_USE_RO),
8954 TYPE_MODE (integer_type_node));
8955 in_check_memory_usage = 0;
8958 /* In cases where an aligned union has an unaligned object
8959 as a field, we might be extracting a BLKmode value from
8960 an integer-mode (e.g., SImode) object. Handle this case
8961 by doing the extract into an object as wide as the field
8962 (which we know to be the width of a basic mode), then
8963 storing into memory, and changing the mode to BLKmode.
8964 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8965 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8966 if (mode1 == VOIDmode
8967 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8968 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8969 && (TYPE_ALIGN (type) > alignment
8970 || bitpos % TYPE_ALIGN (type) != 0)))
8972 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8974 if (ext_mode == BLKmode)
8976 /* In this case, BITPOS must start at a byte boundary. */
8977 if (GET_CODE (op0) != MEM
8978 || bitpos % BITS_PER_UNIT != 0)
8981 op0 = change_address (op0, VOIDmode,
8982 plus_constant (XEXP (op0, 0),
8983 bitpos / BITS_PER_UNIT));
8987 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8989 rtx new = assign_temp (nt, 0, 1, 1);
8991 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8992 unsignedp, NULL_RTX, ext_mode,
8993 ext_mode, alignment,
8994 int_size_in_bytes (TREE_TYPE (tem)));
8996 /* If the result is a record type and BITSIZE is narrower than
8997 the mode of OP0, an integral mode, and this is a big endian
8998 machine, we must put the field into the high-order bits. */
8999 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9000 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9001 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9002 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9003 size_int (GET_MODE_BITSIZE
9008 emit_move_insn (new, op0);
9009 op0 = copy_rtx (new);
9010 PUT_MODE (op0, BLKmode);
9014 /* Get a reference to just this component. */
9015 op0 = change_address (op0, mode1,
9016 plus_constant (XEXP (op0, 0),
9017 (bitpos / BITS_PER_UNIT)));
9019 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9021 /* Adjust the alignment in case the bit position is not
9022 a multiple of the alignment of the inner object. */
9023 while (bitpos % alignment != 0)
9026 if (GET_CODE (XEXP (op0, 0)) == REG)
9027 mark_reg_pointer (XEXP (op0, 0), alignment);
9029 MEM_IN_STRUCT_P (op0) = 1;
9030 MEM_VOLATILE_P (op0) |= volatilep;
9032 *palign = alignment;
/* Default case: no special handling needed; expand normally and keep
   the type's own alignment that was stored at function entry.  */
9041 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9044 /* Return the tree node if ARG corresponds to a string constant or zero
9045 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9046 in bytes within the string that ARG is accessing. The type of the
9047 offset will be `sizetype'. */
9050 string_constant (arg, ptr_offset)
/* Address of a string constant taken directly: the offset is zero.  */
9056 if (TREE_CODE (arg) == ADDR_EXPR
9057 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9059 *ptr_offset = size_zero_node;
9060 return TREE_OPERAND (arg, 0);
9062 else if (TREE_CODE (arg) == PLUS_EXPR)
9064 tree arg0 = TREE_OPERAND (arg, 0);
9065 tree arg1 = TREE_OPERAND (arg, 1);
/* Pointer arithmetic on a string address, (&string + offset): the
   ADDR_EXPR may be either operand of the PLUS; the other operand
   is the byte offset, converted to sizetype for the caller.  */
9070 if (TREE_CODE (arg0) == ADDR_EXPR
9071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9073 *ptr_offset = convert (sizetype, arg1);
9074 return TREE_OPERAND (arg0, 0);
9076 else if (TREE_CODE (arg1) == ADDR_EXPR
9077 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9079 *ptr_offset = convert (sizetype, arg0);
9080 return TREE_OPERAND (arg1, 0);
9087 /* Expand code for a post- or pre- increment or decrement
9088 and return the RTX for the result.
9089 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9092 expand_increment (exp, post, ignore)
9096 register rtx op0, op1;
9097 register rtx temp, value;
9098 register tree incremented = TREE_OPERAND (exp, 0);
9099 optab this_optab = add_optab;
9101 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9102 int op0_is_copy = 0;
9103 int single_insn = 0;
9104 /* 1 means we can't store into OP0 directly,
9105 because it is a subreg narrower than a word,
9106 and we don't dare clobber the rest of the word. */
9109 /* Stabilize any component ref that might need to be
9110 evaluated more than once below. */
9112 || TREE_CODE (incremented) == BIT_FIELD_REF
9113 || (TREE_CODE (incremented) == COMPONENT_REF
9114 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9115 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9116 incremented = stabilize_reference (incremented);
9117 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9118 ones into save exprs so that they don't accidentally get evaluated
9119 more than once by the code below. */
9120 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9121 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9122 incremented = save_expr (incremented);
9124 /* Compute the operands as RTX.
9125 Note whether OP0 is the actual lvalue or a copy of it:
9126 I believe it is a copy iff it is a register or subreg
9127 and insns were generated in computing it. */
9129 temp = get_last_insn ();
9130 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9132 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9133 in place but instead must do sign- or zero-extension during assignment,
9134 so we copy it into a new register and let the code below use it as
9137 Note that we can safely modify this SUBREG since it is know not to be
9138 shared (it was made by the expand_expr call above). */
9140 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9143 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9147 else if (GET_CODE (op0) == SUBREG
9148 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9150 /* We cannot increment this SUBREG in place. If we are
9151 post-incrementing, get a copy of the old value. Otherwise,
9152 just mark that we cannot increment in place. */
9154 op0 = copy_to_reg (op0);
9159 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9160 && temp != get_last_insn ());
9161 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9162 EXPAND_MEMORY_USE_BAD);
9164 /* Decide whether incrementing or decrementing. */
9165 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9166 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9167 this_optab = sub_optab;
9169 /* Convert decrement by a constant into a negative increment. */
9170 if (this_optab == sub_optab
9171 && GET_CODE (op1) == CONST_INT)
9173 op1 = GEN_INT (-INTVAL (op1));
9174 this_optab = add_optab;
9177 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9178 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9180 /* For a preincrement, see if we can do this with a single instruction. */
9183 icode = (int) this_optab->handlers[(int) mode].insn_code;
9184 if (icode != (int) CODE_FOR_nothing
9185 /* Make sure that OP0 is valid for operands 0 and 1
9186 of the insn we want to queue. */
9187 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9188 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9189 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9193 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9194 then we cannot just increment OP0. We must therefore contrive to
9195 increment the original value. Then, for postincrement, we can return
9196 OP0 since it is a copy of the old value. For preincrement, expand here
9197 unless we can do it with a single insn.
9199 Likewise if storing directly into OP0 would clobber high bits
9200 we need to preserve (bad_subreg). */
9201 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9203 /* This is the easiest way to increment the value wherever it is.
9204 Problems with multiple evaluation of INCREMENTED are prevented
9205 because either (1) it is a component_ref or preincrement,
9206 in which case it was stabilized above, or (2) it is an array_ref
9207 with constant index in an array in a register, which is
9208 safe to reevaluate. */
9209 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9210 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9211 ? MINUS_EXPR : PLUS_EXPR),
9214 TREE_OPERAND (exp, 1));
9216 while (TREE_CODE (incremented) == NOP_EXPR
9217 || TREE_CODE (incremented) == CONVERT_EXPR)
9219 newexp = convert (TREE_TYPE (incremented), newexp);
9220 incremented = TREE_OPERAND (incremented, 0);
9223 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9224 return post ? op0 : temp;
9229 /* We have a true reference to the value in OP0.
9230 If there is an insn to add or subtract in this mode, queue it.
9231 Queueing the increment insn avoids the register shuffling
9232 that often results if we must increment now and first save
9233 the old value for subsequent use. */
9235 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9236 op0 = stabilize (op0);
9239 icode = (int) this_optab->handlers[(int) mode].insn_code;
9240 if (icode != (int) CODE_FOR_nothing
9241 /* Make sure that OP0 is valid for operands 0 and 1
9242 of the insn we want to queue. */
9243 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9244 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9246 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9247 op1 = force_reg (mode, op1);
9249 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9251 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9253 rtx addr = (general_operand (XEXP (op0, 0), mode)
9254 ? force_reg (Pmode, XEXP (op0, 0))
9255 : copy_to_reg (XEXP (op0, 0)));
9258 op0 = change_address (op0, VOIDmode, addr);
9259 temp = force_reg (GET_MODE (op0), op0);
9260 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9261 op1 = force_reg (mode, op1);
9263 /* The increment queue is LIFO, thus we have to `queue'
9264 the instructions in reverse order. */
9265 enqueue_insn (op0, gen_move_insn (op0, temp));
9266 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9271 /* Preincrement, or we can't increment with one simple insn. */
9273 /* Save a copy of the value before inc or dec, to return it later. */
9274 temp = value = copy_to_reg (op0);
9276 /* Arrange to return the incremented value. */
9277 /* Copy the rtx because expand_binop will protect from the queue,
9278 and the results of that would be invalid for us to return
9279 if our caller does emit_queue before using our result. */
9280 temp = copy_rtx (value = op0);
9282 /* Increment however we can. */
9283 op1 = expand_binop (mode, this_optab, value, op1,
9284 current_function_check_memory_usage ? NULL_RTX : op0,
9285 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9286 /* Make sure the value is stored into OP0. */
9288 emit_move_insn (op0, op1);
9293 /* At the start of a function, record that we have no previously-pushed
9294 arguments waiting to be popped. */
9297 init_pending_stack_adjust ()
9299 pending_stack_adjust = 0;
9302 /* When exiting from function, if safe, clear out any pending stack adjust
9303 so the adjustment won't get done.
9305 Note, if the current function calls alloca, then it must have a
9306 frame pointer regardless of the value of flag_omit_frame_pointer. */
9309 clear_pending_stack_adjust ()
9311 #ifdef EXIT_IGNORE_STACK
9313 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9314 && EXIT_IGNORE_STACK
9315 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9316 && ! flag_inline_functions)
9318 stack_pointer_delta -= pending_stack_adjust,
9319 pending_stack_adjust = 0;
9324 /* Pop any previously-pushed arguments that have not been popped yet. */
9327 do_pending_stack_adjust ()
9329 if (inhibit_defer_pop == 0)
9331 if (pending_stack_adjust != 0)
9332 adjust_stack (GEN_INT (pending_stack_adjust));
9333 pending_stack_adjust = 0;
9337 /* Expand conditional expressions. */
9339 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9340 LABEL is an rtx of code CODE_LABEL, in this function and all the
9344 jumpifnot (exp, label)
9348 do_jump (exp, label, NULL_RTX);
9351 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9358 do_jump (exp, NULL_RTX, label);
9361 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9362 the result is zero, or IF_TRUE_LABEL if the result is one.
9363 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9364 meaning fall through in that case.
9366 do_jump always does any pending stack adjust except when it does not
9367 actually perform a jump. An example where there is no jump
9368 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9370 This function is responsible for optimizing cases such as
9371 &&, || and comparison operators in EXP. */
9374 do_jump (exp, if_false_label, if_true_label)
9376 rtx if_false_label, if_true_label;
9378 register enum tree_code code = TREE_CODE (exp);
9379 /* Some cases need to create a label to jump to
9380 in order to properly fall through.
9381 These cases set DROP_THROUGH_LABEL nonzero. */
9382 rtx drop_through_label = 0;
9386 enum machine_mode mode;
9388 #ifdef MAX_INTEGER_COMPUTATION_MODE
9389 check_max_integer_computation_mode (exp);
9400 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9406 /* This is not true with #pragma weak */
9408 /* The address of something can never be zero. */
9410 emit_jump (if_true_label);
9415 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9416 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9417 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9420 /* If we are narrowing the operand, we have to do the compare in the
9422 if ((TYPE_PRECISION (TREE_TYPE (exp))
9423 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9425 case NON_LVALUE_EXPR:
9426 case REFERENCE_EXPR:
9431 /* These cannot change zero->non-zero or vice versa. */
9432 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9435 case WITH_RECORD_EXPR:
9436 /* Put the object on the placeholder list, recurse through our first
9437 operand, and pop the list. */
9438 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9440 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9441 placeholder_list = TREE_CHAIN (placeholder_list);
9445 /* This is never less insns than evaluating the PLUS_EXPR followed by
9446 a test and can be longer if the test is eliminated. */
9448 /* Reduce to minus. */
9449 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9450 TREE_OPERAND (exp, 0),
9451 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9452 TREE_OPERAND (exp, 1))));
9453 /* Process as MINUS. */
9457 /* Non-zero iff operands of minus differ. */
9458 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9459 TREE_OPERAND (exp, 0),
9460 TREE_OPERAND (exp, 1)),
9461 NE, NE, if_false_label, if_true_label);
9465 /* If we are AND'ing with a small constant, do this comparison in the
9466 smallest type that fits. If the machine doesn't have comparisons
9467 that small, it will be converted back to the wider comparison.
9468 This helps if we are testing the sign bit of a narrower object.
9469 combine can't do this for us because it can't know whether a
9470 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9472 if (! SLOW_BYTE_ACCESS
9473 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9474 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9475 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9476 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9477 && (type = type_for_mode (mode, 1)) != 0
9478 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9479 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9480 != CODE_FOR_nothing))
9482 do_jump (convert (type, exp), if_false_label, if_true_label);
9487 case TRUTH_NOT_EXPR:
9488 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9491 case TRUTH_ANDIF_EXPR:
9492 if (if_false_label == 0)
9493 if_false_label = drop_through_label = gen_label_rtx ();
9494 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9495 start_cleanup_deferral ();
9496 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9497 end_cleanup_deferral ();
9500 case TRUTH_ORIF_EXPR:
9501 if (if_true_label == 0)
9502 if_true_label = drop_through_label = gen_label_rtx ();
9503 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9504 start_cleanup_deferral ();
9505 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9506 end_cleanup_deferral ();
9511 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9512 preserve_temp_slots (NULL_RTX);
9516 do_pending_stack_adjust ();
9517 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9524 HOST_WIDE_INT bitsize, bitpos;
9526 enum machine_mode mode;
9530 unsigned int alignment;
9532 /* Get description of this reference. We don't actually care
9533 about the underlying object here. */
9534 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9535 &unsignedp, &volatilep, &alignment);
9537 type = type_for_size (bitsize, unsignedp);
9538 if (! SLOW_BYTE_ACCESS
9539 && type != 0 && bitsize >= 0
9540 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9541 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9542 != CODE_FOR_nothing))
9544 do_jump (convert (type, exp), if_false_label, if_true_label);
9551 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9552 if (integer_onep (TREE_OPERAND (exp, 1))
9553 && integer_zerop (TREE_OPERAND (exp, 2)))
9554 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9556 else if (integer_zerop (TREE_OPERAND (exp, 1))
9557 && integer_onep (TREE_OPERAND (exp, 2)))
9558 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9562 register rtx label1 = gen_label_rtx ();
9563 drop_through_label = gen_label_rtx ();
9565 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9567 start_cleanup_deferral ();
9568 /* Now the THEN-expression. */
9569 do_jump (TREE_OPERAND (exp, 1),
9570 if_false_label ? if_false_label : drop_through_label,
9571 if_true_label ? if_true_label : drop_through_label);
9572 /* In case the do_jump just above never jumps. */
9573 do_pending_stack_adjust ();
9574 emit_label (label1);
9576 /* Now the ELSE-expression. */
9577 do_jump (TREE_OPERAND (exp, 2),
9578 if_false_label ? if_false_label : drop_through_label,
9579 if_true_label ? if_true_label : drop_through_label);
9580 end_cleanup_deferral ();
9586 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9588 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9589 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9591 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9592 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9595 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9596 fold (build (EQ_EXPR, TREE_TYPE (exp),
9597 fold (build1 (REALPART_EXPR,
9598 TREE_TYPE (inner_type),
9600 fold (build1 (REALPART_EXPR,
9601 TREE_TYPE (inner_type),
9603 fold (build (EQ_EXPR, TREE_TYPE (exp),
9604 fold (build1 (IMAGPART_EXPR,
9605 TREE_TYPE (inner_type),
9607 fold (build1 (IMAGPART_EXPR,
9608 TREE_TYPE (inner_type),
9610 if_false_label, if_true_label);
9613 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9614 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9616 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9617 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9618 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9620 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9628 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9629 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9631 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9632 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9635 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9636 fold (build (NE_EXPR, TREE_TYPE (exp),
9637 fold (build1 (REALPART_EXPR,
9638 TREE_TYPE (inner_type),
9640 fold (build1 (REALPART_EXPR,
9641 TREE_TYPE (inner_type),
9643 fold (build (NE_EXPR, TREE_TYPE (exp),
9644 fold (build1 (IMAGPART_EXPR,
9645 TREE_TYPE (inner_type),
9647 fold (build1 (IMAGPART_EXPR,
9648 TREE_TYPE (inner_type),
9650 if_false_label, if_true_label);
9653 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9654 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9656 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9657 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9658 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9660 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9665 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9666 if (GET_MODE_CLASS (mode) == MODE_INT
9667 && ! can_compare_p (LT, mode, ccp_jump))
9668 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9670 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9674 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9675 if (GET_MODE_CLASS (mode) == MODE_INT
9676 && ! can_compare_p (LE, mode, ccp_jump))
9677 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9679 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9683 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9684 if (GET_MODE_CLASS (mode) == MODE_INT
9685 && ! can_compare_p (GT, mode, ccp_jump))
9686 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9688 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9692 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9693 if (GET_MODE_CLASS (mode) == MODE_INT
9694 && ! can_compare_p (GE, mode, ccp_jump))
9695 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9697 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9700 case UNORDERED_EXPR:
9703 enum rtx_code cmp, rcmp;
9706 if (code == UNORDERED_EXPR)
9707 cmp = UNORDERED, rcmp = ORDERED;
9709 cmp = ORDERED, rcmp = UNORDERED;
9710 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 if (! can_compare_p (cmp, mode, ccp_jump)
9714 && (can_compare_p (rcmp, mode, ccp_jump)
9715 /* If the target doesn't provide either UNORDERED or ORDERED
9716 comparisons, canonicalize on UNORDERED for the library. */
9717 || rcmp == UNORDERED))
9721 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9723 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9728 enum rtx_code rcode1;
9729 enum tree_code tcode2;
9753 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9754 if (can_compare_p (rcode1, mode, ccp_jump))
9755 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9759 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9760 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9763 /* If the target doesn't support combined unordered
9764 compares, decompose into UNORDERED + comparison. */
9765 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9766 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9767 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9768 do_jump (exp, if_false_label, if_true_label);
9775 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9777 /* This is not needed any more and causes poor code since it causes
9778 comparisons and tests from non-SI objects to have different code
9780 /* Copy to register to avoid generating bad insns by cse
9781 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9782 if (!cse_not_expected && GET_CODE (temp) == MEM)
9783 temp = copy_to_reg (temp);
9785 do_pending_stack_adjust ();
9786 /* Do any postincrements in the expression that was tested. */
9789 if (GET_CODE (temp) == CONST_INT
9790 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9791 || GET_CODE (temp) == LABEL_REF)
9793 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9797 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9798 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9799 /* Note swapping the labels gives us not-equal. */
9800 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9801 else if (GET_MODE (temp) != VOIDmode)
9802 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9803 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9804 GET_MODE (temp), NULL_RTX, 0,
9805 if_false_label, if_true_label);
9810 if (drop_through_label)
9812 /* If do_jump produces code that might be jumped around,
9813 do any stack adjusts from that code, before the place
9814 where control merges in. */
9815 do_pending_stack_adjust ();
9816 emit_label (drop_through_label);
9820 /* Given a comparison expression EXP for values too wide to be compared
9821 with one insn, test the comparison and jump to the appropriate label.
9822 The code of EXP is ignored; we always test GT if SWAP is 0,
9823 and LT if SWAP is 1. */
9826 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9829 rtx if_false_label, if_true_label;
9831 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9832 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9833 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9836 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9839 /* Compare OP0 with OP1, word at a time, in mode MODE.
9840 UNSIGNEDP says to do unsigned comparison.
9841 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9844 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9845 enum machine_mode mode;
9848 rtx if_false_label, if_true_label;
9850 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9851 rtx drop_through_label = 0;
9854 if (! if_true_label || ! if_false_label)
9855 drop_through_label = gen_label_rtx ();
9856 if (! if_true_label)
9857 if_true_label = drop_through_label;
9858 if (! if_false_label)
9859 if_false_label = drop_through_label;
9861 /* Compare a word at a time, high order first. */
9862 for (i = 0; i < nwords; i++)
9864 rtx op0_word, op1_word;
9866 if (WORDS_BIG_ENDIAN)
9868 op0_word = operand_subword_force (op0, i, mode);
9869 op1_word = operand_subword_force (op1, i, mode);
9873 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9874 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9877 /* All but high-order word must be compared as unsigned. */
9878 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9879 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9880 NULL_RTX, if_true_label);
9882 /* Consider lower words only if these are equal. */
9883 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9884 NULL_RTX, 0, NULL_RTX, if_false_label);
9888 emit_jump (if_false_label);
9889 if (drop_through_label)
9890 emit_label (drop_through_label);
9893 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9894 with one insn, test the comparison and jump to the appropriate label. */
9897 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9899 rtx if_false_label, if_true_label;
9901 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9902 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9903 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9904 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9906 rtx drop_through_label = 0;
9908 if (! if_false_label)
9909 drop_through_label = if_false_label = gen_label_rtx ();
9911 for (i = 0; i < nwords; i++)
9912 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9913 operand_subword_force (op1, i, mode),
9914 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9915 word_mode, NULL_RTX, 0, if_false_label,
9919 emit_jump (if_true_label);
9920 if (drop_through_label)
9921 emit_label (drop_through_label);
9924 /* Jump according to whether OP0 is 0.
9925 We assume that OP0 has an integer mode that is too wide
9926 for the available compare insns. */
9929 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9931 rtx if_false_label, if_true_label;
9933 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9936 rtx drop_through_label = 0;
9938 /* The fastest way of doing this comparison on almost any machine is to
9939 "or" all the words and compare the result. If all have to be loaded
9940 from memory and this is a very wide item, it's possible this may
9941 be slower, but that's highly unlikely. */
9943 part = gen_reg_rtx (word_mode);
9944 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9945 for (i = 1; i < nwords && part != 0; i++)
9946 part = expand_binop (word_mode, ior_optab, part,
9947 operand_subword_force (op0, i, GET_MODE (op0)),
9948 part, 1, OPTAB_WIDEN);
9952 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9953 NULL_RTX, 0, if_false_label, if_true_label);
9958 /* If we couldn't do the "or" simply, do this with a series of compares. */
9959 if (! if_false_label)
9960 drop_through_label = if_false_label = gen_label_rtx ();
9962 for (i = 0; i < nwords; i++)
9963 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9964 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9965 if_false_label, NULL_RTX);
9968 emit_jump (if_true_label);
9970 if (drop_through_label)
9971 emit_label (drop_through_label);
9974 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9975 (including code to compute the values to be compared)
9976 and set (CC0) according to the result.
9977 The decision as to signed or unsigned comparison must be made by the caller.
9979 We force a stack adjustment unless there are currently
9980 things pushed on the stack that aren't yet used.
9982 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9985 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9986 size of MODE should be used. */
9989 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9990 register rtx op0, op1;
9993 enum machine_mode mode;
9999 /* If one operand is constant, make it the second one. Only do this
10000 if the other operand is not constant as well. */
10002 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10003 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10008 code = swap_condition (code);
10011 if (flag_force_mem)
10013 op0 = force_not_mem (op0);
10014 op1 = force_not_mem (op1);
10017 do_pending_stack_adjust ();
10019 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10020 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10024 /* There's no need to do this now that combine.c can eliminate lots of
10025 sign extensions. This can be less efficient in certain cases on other
10028 /* If this is a signed equality comparison, we can do it as an
10029 unsigned comparison since zero-extension is cheaper than sign
10030 extension and comparisons with zero are done as unsigned. This is
10031 the case even on machines that can do fast sign extension, since
10032 zero-extension is easier to combine with other operations than
10033 sign-extension is. If we are comparing against a constant, we must
10034 convert it to what it would look like unsigned. */
10035 if ((code == EQ || code == NE) && ! unsignedp
10036 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10038 if (GET_CODE (op1) == CONST_INT
10039 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10040 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10045 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10047 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10050 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10051 The decision as to signed or unsigned comparison must be made by the caller.
10053 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10056 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10057 size of MODE should be used. */
10060 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10061 if_false_label, if_true_label)
10062 register rtx op0, op1;
10063 enum rtx_code code;
10065 enum machine_mode mode;
10067 unsigned int align;
10068 rtx if_false_label, if_true_label;
10071 int dummy_true_label = 0;
10073 /* Reverse the comparison if that is safe and we want to jump if it is
10075 if (! if_true_label && ! FLOAT_MODE_P (mode))
10077 if_true_label = if_false_label;
10078 if_false_label = 0;
10079 code = reverse_condition (code);
10082 /* If one operand is constant, make it the second one. Only do this
10083 if the other operand is not constant as well. */
10085 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10086 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10091 code = swap_condition (code);
10094 if (flag_force_mem)
10096 op0 = force_not_mem (op0);
10097 op1 = force_not_mem (op1);
10100 do_pending_stack_adjust ();
10102 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10103 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10105 if (tem == const_true_rtx)
10108 emit_jump (if_true_label);
10112 if (if_false_label)
10113 emit_jump (if_false_label);
10119 /* There's no need to do this now that combine.c can eliminate lots of
10120 sign extensions. This can be less efficient in certain cases on other
10123 /* If this is a signed equality comparison, we can do it as an
10124 unsigned comparison since zero-extension is cheaper than sign
10125 extension and comparisons with zero are done as unsigned. This is
10126 the case even on machines that can do fast sign extension, since
10127 zero-extension is easier to combine with other operations than
10128 sign-extension is. If we are comparing against a constant, we must
10129 convert it to what it would look like unsigned. */
10130 if ((code == EQ || code == NE) && ! unsignedp
10131 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10133 if (GET_CODE (op1) == CONST_INT
10134 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10135 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10140 if (! if_true_label)
10142 dummy_true_label = 1;
10143 if_true_label = gen_label_rtx ();
10146 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10149 if (if_false_label)
10150 emit_jump (if_false_label);
10151 if (dummy_true_label)
10152 emit_label (if_true_label);
10155 /* Generate code for a comparison expression EXP (including code to compute
10156 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10157 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10158 generated code will drop through.
10159 SIGNED_CODE should be the rtx operation for this comparison for
10160 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10162 We force a stack adjustment unless there are currently
10163 things pushed on the stack that aren't yet used. */
/* NOTE(review): this is an elided listing -- the embedded line numbers skip
   (10166 -> 10169, 10181 -> 10184, ...), so the return type, braces, early
   returns and parts of argument lists are not visible.  Comments below
   describe only the code that is visible; consult the full expr.c before
   relying on them.  */
/* Expand both operands of the comparison EXP, choose the signed or unsigned
   rtx comparison code from SIGNED_CODE/UNSIGNED_CODE based on the operand
   type, canonicalize function-pointer operands if the target requires it,
   and finally emit the compare-and-jump via do_compare_rtx_and_jump.  */
10166 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10169 enum rtx_code signed_code, unsigned_code;
10170 rtx if_false_label, if_true_label;
10172 unsigned int align0, align1;
10173 register rtx op0, op1;
10174 register tree type;
10175 register enum machine_mode mode;
10177 enum rtx_code code;
10179 /* Don't crash if the comparison was erroneous. */
10180 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
/* (elided between 10181 and 10184: presumably an early return on the
   ERROR_MARK -- TODO confirm against the full file.)  */
10184 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10185 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10188 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10189 mode = TYPE_MODE (type);
/* If operand 0 is an INTEGER_CST (possibly a promoted constant wider than
   operand 1's mode), prefer operand 1's type/mode for the comparison.  */
10190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10191 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10192 || (GET_MODE_BITSIZE (mode)
10193 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10196 /* op0 might have been replaced by promoted constant, in which
10197 case the type of second argument should be used. */
10198 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10199 mode = TYPE_MODE (type);
/* Pick the unsigned comparison opcode when the tree type is unsigned.  */
10201 unsignedp = TREE_UNSIGNED (type);
10202 code = unsignedp ? unsigned_code : signed_code;
10204 #ifdef HAVE_canonicalize_funcptr_for_compare
10205 /* If function pointers need to be "canonicalized" before they can
10206 be reliably compared, then canonicalize them. */
10207 if (HAVE_canonicalize_funcptr_for_compare
10208 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10209 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10212 rtx new_op0 = gen_reg_rtx (mode);
10214 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
10218 if (HAVE_canonicalize_funcptr_for_compare
10219 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10220 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10223 rtx new_op1 = gen_reg_rtx (mode);
10225 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10230 /* Do any postincrements in the expression that was tested. */
/* Emit the compare and conditional jump.  The size argument (operand
   whose condition at line 10234 is elided here) is passed only for
   comparisons that need an operand size; otherwise NULL_RTX.  */
10233 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10235 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10236 MIN (align0, align1),
10237 if_false_label, if_true_label);
10240 /* Generate code to calculate EXP using a store-flag instruction
10241 and return an rtx for the result. EXP is either a comparison
10242 or a TRUTH_NOT_EXPR whose operand is a comparison.
10244 If TARGET is nonzero, store the result there if convenient.
10246 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10249 Return zero if there is no suitable set-flag instruction
10250 available on this machine.
10252 Once expand_expr has been called on the arguments of the comparison,
10253 we are committed to doing the store flag, since it is not safe to
10254 re-evaluate the expression. We emit the store-flag insn by calling
10255 emit_store_flag, but only expand the arguments if we have a reason
10256 to believe that emit_store_flag will be successful. If we think that
10257 it will, but it isn't, we have to simulate the store-flag with a
10258 set/jump/set sequence. */
/* NOTE(review): elided listing -- embedded line numbers skip (10326-10333,
   10359-10383, several returns and braces are missing), so comments below
   describe only visible code.  Verify against the full expr.c.  */
/* Emit a store-flag (scc) sequence computing comparison EXP into TARGET
   (or a new pseudo) in mode MODE.  Returns the result rtx, or (per the
   header comment above) zero when no suitable set-flag strategy exists.
   ONLY_CHEAP restricts the attempt to cases expected to be cheap.  */
10261 do_store_flag (exp, target, mode, only_cheap)
10264 enum machine_mode mode;
10267 enum rtx_code code;
10268 tree arg0, arg1, type;
10270 enum machine_mode operand_mode;
10274 enum insn_code icode;
10275 rtx subtarget = target;
10278 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10279 result at the end. We can't simply invert the test since it would
10280 have already been inverted if it were valid. This case occurs for
10281 some floating-point comparisons. */
10283 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10284 invert = 1, exp = TREE_OPERAND (exp, 0);
10286 arg0 = TREE_OPERAND (exp, 0);
10287 arg1 = TREE_OPERAND (exp, 1);
10289 /* Don't crash if the comparison was erroneous. */
10290 if (arg0 == error_mark_node || arg1 == error_mark_node)
10293 type = TREE_TYPE (arg0);
10294 operand_mode = TYPE_MODE (type);
10295 unsignedp = TREE_UNSIGNED (type);
10297 /* We won't bother with BLKmode store-flag operations because it would mean
10298 passing a lot of information to emit_store_flag. */
10299 if (operand_mode == BLKmode)
10302 /* We won't bother with store-flag operations involving function pointers
10303 when function pointers must be canonicalized before comparisons. */
10304 #ifdef HAVE_canonicalize_funcptr_for_compare
10305 if (HAVE_canonicalize_funcptr_for_compare
10306 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10307 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10309 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10310 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10311 == FUNCTION_TYPE))))
10318 /* Get the rtx comparison code to use. We know that EXP is a comparison
10319 operation of some type. Some comparisons against 1 and -1 can be
10320 converted to comparisons with zero. Do so here so that the tests
10321 below will be aware that we have a comparison with zero. These
10322 tests will not catch constants in the first operand, but constants
10323 are rarely passed as the first operand. */
/* (Case labels 10326-10333 etc. are elided; the visible bodies below
   map each tree comparison code to an rtx code, folding x<1 -> x<=0,
   x<=-1 -> x<0, x>-1 -> x>=0, x>=1 -> x>0 where valid.)  */
10325 switch (TREE_CODE (exp))
10334 if (integer_onep (arg1))
10335 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10337 code = unsignedp ? LTU : LT;
10340 if (! unsignedp && integer_all_onesp (arg1))
10341 arg1 = integer_zero_node, code = LT;
10343 code = unsignedp ? LEU : LE;
10346 if (! unsignedp && integer_all_onesp (arg1))
10347 arg1 = integer_zero_node, code = GE;
10349 code = unsignedp ? GTU : GT;
10352 if (integer_onep (arg1))
10353 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10355 code = unsignedp ? GEU : GE;
10358 case UNORDERED_EXPR:
10384 /* Put a constant second. */
10385 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10387 tem = arg0; arg0 = arg1; arg1 = tem;
10388 code = swap_condition (code);
10391 /* If this is an equality or inequality test of a single bit, we can
10392 do this by shifting the bit being tested to the low-order bit and
10393 masking the result with the constant 1. If the condition was EQ,
10394 we xor it with 1. This does not require an scc insn and is faster
10395 than an scc insn even if we have it. */
10397 if ((code == NE || code == EQ)
10398 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10399 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10401 tree inner = TREE_OPERAND (arg0, 0);
10402 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10405 /* If INNER is a right shift of a constant and it plus BITNUM does
10406 not overflow, adjust BITNUM and INNER. */
10408 if (TREE_CODE (inner) == RSHIFT_EXPR
10409 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10410 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10411 && bitnum < TYPE_PRECISION (type)
10412 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10413 bitnum - TYPE_PRECISION (type)))
10415 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10416 inner = TREE_OPERAND (inner, 0);
10419 /* If we are going to be able to omit the AND below, we must do our
10420 operations as unsigned. If we must use the AND, we have a choice.
10421 Normally unsigned is faster, but for some machines signed is. */
10422 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10423 #ifdef LOAD_EXTEND_OP
10424 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Reuse SUBTARGET only when it is a usable register of the right mode
   that the expansion of INNER cannot clobber.  */
10430 if (! get_subtarget (subtarget)
10431 || GET_MODE (subtarget) != operand_mode
10432 || ! safe_from_p (subtarget, inner, 1))
10435 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
10438 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10439 size_int (bitnum), subtarget, ops_unsignedp);
10441 if (GET_MODE (op0) != mode)
10442 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* For EQ (or inverted NE) flip the low bit with XOR 1.  */
10444 if ((code == EQ && ! invert) || (code == NE && invert))
10445 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10446 ops_unsignedp, OPTAB_LIB_WIDEN);
10448 /* Put the AND last so it can combine with more things. */
10449 if (bitnum != TYPE_PRECISION (type) - 1)
10450 op0 = expand_and (op0, const1_rtx, subtarget);
10455 /* Now see if we are likely to be able to do this. Return if not. */
10456 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10459 icode = setcc_gen_code[(int) code];
10460 if (icode == CODE_FOR_nothing
10461 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10463 /* We can only do this if it is one of the special cases that
10464 can be handled without an scc insn. */
10465 if ((code == LT && integer_zerop (arg1))
10466 || (! only_cheap && code == GE && integer_zerop (arg1)))
/* NE/EQ against zero can also be emulated via abs or ffs when the
   target provides those operations.  */
10468 else if (BRANCH_COST >= 0
10469 && ! only_cheap && (code == NE || code == EQ)
10470 && TREE_CODE (type) != REAL_TYPE
10471 && ((abs_optab->handlers[(int) operand_mode].insn_code
10472 != CODE_FOR_nothing)
10473 || (ffs_optab->handlers[(int) operand_mode].insn_code
10474 != CODE_FOR_nothing)))
10480 if (! get_subtarget (target)
10481 || GET_MODE (subtarget) != operand_mode
10482 || ! safe_from_p (subtarget, arg1, 1))
10485 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10486 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10489 target = gen_reg_rtx (mode);
10491 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10492 because, if the emit_store_flag does anything it will succeed and
10493 OP0 and OP1 will not be used subsequently. */
10495 result = emit_store_flag (target, code,
10496 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10497 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10498 operand_mode, unsignedp, 1);
/* If emit_store_flag succeeded (elided test), invert the 0/1 result
   with XOR when a TRUTH_NOT_EXPR wrapped the comparison.  */
10503 result = expand_binop (mode, xor_optab, result, const1_rtx,
10504 result, 0, OPTAB_LIB_WIDEN);
10508 /* If this failed, we have to do this with set/compare/jump/set code. */
10509 if (GET_CODE (target) != REG
10510 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10511 target = gen_reg_rtx (GET_MODE (target));
/* Preset TARGET to the "true" value, then branch around a store of the
   "false" value when the comparison fails.  */
10513 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10514 result = compare_from_rtx (op0, op1, code, unsignedp,
10515 operand_mode, NULL_RTX, 0);
10516 if (GET_CODE (result) == CONST_INT)
10517 return (((result == const0_rtx && ! invert)
10518 || (result != const0_rtx && invert))
10519 ? const0_rtx : const1_rtx);
10521 label = gen_label_rtx ();
10522 if (bcc_gen_fctn[(int) code] == 0)
10525 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10526 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10527 emit_label (label);
10532 /* Generate a tablejump instruction (used for switch statements). */
10534 #ifdef HAVE_tablejump
10536 /* INDEX is the value being switched on, with the lowest value
10537 in the table already subtracted.
10538 MODE is its expected mode (needed if INDEX is constant).
10539 RANGE is the length of the jump table.
10540 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10542 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10543 index value is out of range. */
/* NOTE(review): elided listing -- the closing brace and a few statements
   (e.g. the barrier emission after line 10603) are not visible here.  */
/* Emit a tablejump: bounds-check INDEX against RANGE (jumping to
   DEFAULT_LABEL when out of range), build the address of the jump-table
   entry at TABLE_LABEL, load it, and emit the indirect jump.  */
10546 do_tablejump (index, mode, range, table_label, default_label)
10547 rtx index, range, table_label, default_label;
10548 enum machine_mode mode;
10550 register rtx temp, vector;
10552 /* Do an unsigned comparison (in the proper mode) between the index
10553 expression and the value which represents the length of the range.
10554 Since we just finished subtracting the lower bound of the range
10555 from the index expression, this comparison allows us to simultaneously
10556 check that the original index expression value is both greater than
10557 or equal to the minimum value of the range and less than or equal to
10558 the maximum value of the range. */
10560 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10563 /* If index is in range, it must fit in Pmode.
10564 Convert to Pmode so we can index with it. */
10566 index = convert_to_mode (Pmode, index, 1);
10568 /* Don't let a MEM slip thru, because then INDEX that comes
10569 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10570 and break_out_memory_refs will go to work on it and mess it up. */
10571 #ifdef PIC_CASE_VECTOR_ADDRESS
10572 if (flag_pic && GET_CODE (index) != REG)
10573 index = copy_to_mode_reg (Pmode, index);
10576 /* If flag_force_addr were to affect this address
10577 it could interfere with the tricky assumptions made
10578 about addresses that contain label-refs,
10579 which may be valid only very near the tablejump itself. */
10580 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10581 GET_MODE_SIZE, because this indicates how large insns are. The other
10582 uses should all be Pmode, because they are addresses. This code
10583 could fail if addresses and insns are not the same size. */
/* Entry address = table_label + index * sizeof (table entry).  */
10584 index = gen_rtx_PLUS (Pmode,
10585 gen_rtx_MULT (Pmode, index,
10586 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10587 gen_rtx_LABEL_REF (Pmode, table_label));
10588 #ifdef PIC_CASE_VECTOR_ADDRESS
10590 index = PIC_CASE_VECTOR_ADDRESS (index);
10593 index = memory_address_noforce (CASE_VECTOR_MODE, index);
/* Load the table entry (marked unchanging: jump tables are read-only)
   into a fresh pseudo, then jump through it.  */
10594 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10595 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10596 RTX_UNCHANGING_P (vector) = 1;
10597 convert_move (temp, vector, 0);
10599 emit_jump_insn (gen_tablejump (temp, table_label));
10601 /* If we are generating PIC code or if the table is PC-relative, the
10602 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10603 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10607 #endif /* HAVE_tablejump */