/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
   Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
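
/* Example (editor's illustrative sketch, not part of the original file):
   a front end installs the hook once during initialization.  The routine
   name and the tree code tested here are hypothetical; the hook need only
   recognize the front end's own codes and may call safe_from_p itself,
   always passing `0' for TOP_P.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  if (TREE_CODE (exp) == LANG_SPECIFIC_REF)	/* hypothetical code */
    return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
  return 1;
}

void
example_lang_init ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif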
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be generated.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address PARAMS ((int));

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
  PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
				       HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int,
				unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
#endif
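
/* Example (editor's illustrative sketch, not part of the original file):
   how the heuristic is used.  On a 32-bit target with word-aligned
   operands, a 16-byte copy costs four SImode moves, and 4 < 15 (the
   default MOVE_RATIO when not optimizing for size), so the copy is
   expanded inline instead of via a movstr pattern or a library call.
   DST, SRC, SIZE and ALIGN stand for the operands at hand.  */
#if 0
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (dst, src, INTVAL (size), align);
#endif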
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
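
/* Example (editor's illustrative sketch, not part of the original file):
   the intended calling sequence when expanding something like `y++'.
   Y_RTX and TARGET are placeholders.  The QUEUED returned by enqueue_insn
   stands for Y; reading it through protect_from_queue yields the
   pre-increment value, and emit_queue finally emits the increment.  */
#if 0
  rtx q = enqueue_insn (y_rtx, gen_add2_insn (y_rtx, const1_rtx));
  emit_move_insn (target, protect_from_queue (q, 0));
  emit_queue ();
#endif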
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_COPY_ATTRIBUTES (new, x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode: libcall = extendsfdf2_libfunc; break;
	    case XFmode: libcall = extendsfxf2_libfunc; break;
	    case TFmode: libcall = extendsftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncdfsf2_libfunc; break;
	    case XFmode: libcall = extenddfxf2_libfunc; break;
	    case TFmode: libcall = extenddftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncxfsf2_libfunc; break;
	    case DFmode: libcall = truncxfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = trunctfsf2_libfunc; break;
	    case DFmode: libcall = trunctfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  gen_rtx_FLOAT_TRUNCATE (to_mode, from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
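
/* Example (editor's illustrative sketch, not part of the original file):
   zero-extending a QImode pseudo into a fresh SImode rtx.  Since BYTE
   cannot simply be reinterpreted in place, convert_to_mode allocates a
   new SImode register and emits the extension via convert_move.  */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx word = convert_to_mode (SImode, byte, 1);
#endif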
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	{
	  to1 = gen_rtx_MEM (mode, data->to_addr);
	  MEM_COPY_ATTRIBUTES (to1, data->to);
	}
      else
	to1 = change_address (data->to, mode,
			      plus_constant (data->to_addr, data->offset));

      if (data->autinc_from)
	{
	  from1 = gen_rtx_MEM (mode, data->from_addr);
	  MEM_COPY_ATTRIBUTES (from1, data->from);
	}
      else
	from1 = change_address (data->from, mode,
				plus_constant (data->from_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
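
/* Example (editor's illustrative sketch, not part of the original file):
   copying a 16-byte BLKmode object.  DST_MEM and SRC_MEM are assumed to
   be BLKmode MEMs; 32 is the alignment in bits that both operands are
   known to have.  */
#if 0
  emit_block_move (dst_mem, src_mem, GEN_INT (16), 32);
#endif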
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be required.  */
void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
    {
      if (GET_MODE (src) == VOIDmode)
	src = gen_reg_rtx (GET_MODE (dst));
      else
	src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else if ((CONSTANT_P (src)
		&& (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
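
/* Example (editor's illustrative sketch, not part of the original file):
   the shape of the PARALLEL operand.  Each element is an EXPR_LIST
   pairing a register with the byte offset it covers; the hard register
   numbers 4 and 5 are placeholders, as is SRC_MEM.  This loads a
   16-byte block into two DImode registers.  */
#if 0
  rtx par
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      const0_rtx),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));
  emit_group_load (par, src_mem, 16, 64);
#endif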
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (change_address (dst, mode,
					plus_constant (XEXP (dst, 0),
						       bytepos)),
			tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
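
/* Example (editor's illustrative sketch, not part of the original file):
   retrieving a small structure returned in registers.  SRCREG is the
   hard return register and TYPE the structure's type, both placeholders;
   passing NULL_RTX for TGTBLK makes the routine allocate a stack
   temporary and return it.  */
#if 0
  rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);
#endif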
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2261 can_store_by_pieces (len, constfun, constfundata, align)
2262 unsigned HOST_WIDE_INT len;
2263 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2267 unsigned HOST_WIDE_INT max_size, l;
2268 HOST_WIDE_INT offset = 0;
2269 enum machine_mode mode, tmode;
2270 enum insn_code icode;
2274 if (! MOVE_BY_PIECES_P (len, align))
2275 return 0;
2277 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2278 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2279 align = MOVE_MAX * BITS_PER_UNIT;
2281 /* We would first store what we can in the largest integer mode, then go to
2282 successively smaller modes. */
2284 for (reverse = 0;
2285 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2286 reverse++)
2290 max_size = MOVE_MAX_PIECES + 1;
2291 while (max_size > 1)
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2296 mode = tmode;
2298 if (mode == VOIDmode)
2299 break;
2301 icode = mov_optab->handlers[(int) mode].insn_code;
2302 if (icode != CODE_FOR_nothing
2303 && align >= GET_MODE_ALIGNMENT (mode))
2305 unsigned int size = GET_MODE_SIZE (mode);
2312 cst = (*constfun) (constfundata, offset, mode);
2313 if (!LEGITIMATE_CONSTANT_P (cst))
2314 return 0;
2323 max_size = GET_MODE_SIZE (mode);
2326 /* The code above should have handled everything. */
2334 /* Generate several move instructions to store LEN bytes generated by
2335 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2336 pointer which will be passed as argument in every CONSTFUN call.
2337 ALIGN is maximum alignment we can assume. */
2340 store_by_pieces (to, len, constfun, constfundata, align)
2342 unsigned HOST_WIDE_INT len;
2343 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2347 struct store_by_pieces data;
2349 if (! MOVE_BY_PIECES_P (len, align))
2350 abort ();
2351 to = protect_from_queue (to, 1);
2352 data.constfun = constfun;
2353 data.constfundata = constfundata;
2356 store_by_pieces_1 (&data, align);
2359 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2360 rtx with BLKmode). The caller must pass TO through protect_from_queue
2361 before calling. ALIGN is maximum alignment we can assume. */
2364 clear_by_pieces (to, len, align)
2366 unsigned HOST_WIDE_INT len;
2369 struct store_by_pieces data;
2371 data.constfun = clear_by_pieces_1;
2372 data.constfundata = NULL_PTR;
2375 store_by_pieces_1 (&data, align);
2378 /* Callback routine for clear_by_pieces.
2379 Return const0_rtx unconditionally. */
2382 clear_by_pieces_1 (data, offset, mode)
2383 PTR data ATTRIBUTE_UNUSED;
2384 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2385 enum machine_mode mode ATTRIBUTE_UNUSED;
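/* The following is an illustrative sketch, not part of the original
   file: a hypothetical CONSTFUN in the style of clear_by_pieces_1 that
   replicates a single byte (pointed to by DATA) across every byte of
   MODE, roughly what a memset-style expansion needs.  It assumes
   trunc_int_for_mode is available to canonicalize the constant.  */

static rtx
example_byte_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT val = *(unsigned char *) data;
  unsigned HOST_WIDE_INT c = 0;
  unsigned int i;

  /* Build a constant with VAL in each of MODE's bytes.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c = (c << BITS_PER_UNIT) | val;

  return GEN_INT (trunc_int_for_mode (c, mode));
}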
2390 /* Subroutine of clear_by_pieces and store_by_pieces.
2391 Generate several move instructions to store LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
2396 store_by_pieces_1 (data, align)
2397 struct store_by_pieces *data;
2400 rtx to_addr = XEXP (data->to, 0);
2401 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2402 enum machine_mode mode = VOIDmode, tmode;
2403 enum insn_code icode;
2406 data->to_addr = to_addr;
2407 data->autinc_to
2408 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2409 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2411 data->explicit_inc_to = 0;
2412 data->reverse
2413 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2414 if (data->reverse)
2415 data->offset = data->len;
2417 /* If storing requires more than two move insns,
2418 copy addresses to registers (to make displacements shorter)
2419 and use post-increment if available. */
2420 if (!data->autinc_to
2421 && move_by_pieces_ninsns (data->len, align) > 2)
2423 /* Determine the main mode we'll be using. */
2424 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2425 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) < max_size)
2427 mode = tmode;
2429 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2431 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2432 data->autinc_to = 1;
2433 data->explicit_inc_to = -1;
2436 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2437 && ! data->autinc_to)
2439 data->to_addr = copy_addr_to_reg (to_addr);
2440 data->autinc_to = 1;
2441 data->explicit_inc_to = 1;
2444 if (! data->autinc_to && CONSTANT_P (to_addr))
2445 data->to_addr = copy_addr_to_reg (to_addr);
2448 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2449 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2450 align = MOVE_MAX * BITS_PER_UNIT;
2452 /* First store what we can in the largest integer mode, then go to
2453 successively smaller modes. */
2455 while (max_size > 1)
2457 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2458 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2459 if (GET_MODE_SIZE (tmode) < max_size)
2460 mode = tmode;
2462 if (mode == VOIDmode)
2463 break;
2465 icode = mov_optab->handlers[(int) mode].insn_code;
2466 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2467 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2469 max_size = GET_MODE_SIZE (mode);
2472 /* The code above should have handled everything. */
2477 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2478 with move instructions for mode MODE. GENFUN is the gen_... function
2479 to make a move insn for that mode. DATA has all the other info. */
2482 store_by_pieces_2 (genfun, mode, data)
2483 rtx (*genfun) PARAMS ((rtx, ...));
2484 enum machine_mode mode;
2485 struct store_by_pieces *data;
2487 unsigned int size = GET_MODE_SIZE (mode);
2490 while (data->len >= size)
2492 if (data->reverse)
2493 data->offset -= size;
2495 if (data->autinc_to)
2497 to1 = gen_rtx_MEM (mode, data->to_addr);
2498 MEM_COPY_ATTRIBUTES (to1, data->to);
2500 else
2501 to1 = change_address (data->to, mode,
2502 plus_constant (data->to_addr, data->offset));
2504 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2505 emit_insn (gen_add2_insn (data->to_addr,
2506 GEN_INT (-(HOST_WIDE_INT) size)));
2508 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2509 emit_insn ((*genfun) (to1, cst));
2511 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2512 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2514 if (! data->reverse)
2515 data->offset += size;
2521 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2522 its length in bytes and ALIGN is the maximum alignment we can assume.
2524 If we call a function that returns the length of the block, return it. */
2527 clear_storage (object, size, align)
2532 #ifdef TARGET_MEM_FUNCTIONS
2534 tree call_expr, arg_list;
2538 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2539 just move a zero. Otherwise, do this a piece at a time. */
2540 if (GET_MODE (object) != BLKmode
2541 && GET_CODE (size) == CONST_INT
2542 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2543 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2546 object = protect_from_queue (object, 1);
2547 size = protect_from_queue (size, 0);
2549 if (GET_CODE (size) == CONST_INT
2550 && MOVE_BY_PIECES_P (INTVAL (size), align))
2551 clear_by_pieces (object, INTVAL (size), align);
2554 /* Try the most limited insn first, because there's no point
2555 including more than one in the machine description unless
2556 the more limited one has some advantage. */
2558 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2559 enum machine_mode mode;
2561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2562 mode = GET_MODE_WIDER_MODE (mode))
2564 enum insn_code code = clrstr_optab[(int) mode];
2565 insn_operand_predicate_fn pred;
2567 if (code != CODE_FOR_nothing
2568 /* We don't need MODE to be narrower than
2569 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2570 the mode mask, as it is returned by the macro, it will
2571 definitely be less than the actual mode mask. */
2572 && ((GET_CODE (size) == CONST_INT
2573 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2574 <= (GET_MODE_MASK (mode) >> 1)))
2575 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2576 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2577 || (*pred) (object, BLKmode))
2578 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2579 || (*pred) (opalign, VOIDmode)))
2582 rtx last = get_last_insn ();
2585 op1 = convert_to_mode (mode, size, 1);
2586 pred = insn_data[(int) code].operand[1].predicate;
2587 if (pred != 0 && ! (*pred) (op1, mode))
2588 op1 = copy_to_mode_reg (mode, op1);
2590 pat = GEN_FCN ((int) code) (object, op1, opalign);
2597 delete_insns_since (last);
2601 /* OBJECT or SIZE may have been passed through protect_from_queue.
2603 It is unsafe to save the value generated by protect_from_queue
2604 and reuse it later. Consider what happens if emit_queue is
2605 called before the return value from protect_from_queue is used.
2607 Expansion of the CALL_EXPR below will call emit_queue before
2608 we are finished emitting RTL for argument setup. So if we are
2609 not careful we could get the wrong value for an argument.
2611 To avoid this problem we go ahead and emit code to copy OBJECT
2612 and SIZE into new pseudos. We can then place those new pseudos
2613 into an RTL_EXPR and use them later, even after a call to
2616 Note this is not strictly needed for library calls since they
2617 do not call emit_queue before loading their arguments. However,
2618 we may need to have library calls call emit_queue in the future
2619 since failing to do so could cause problems for targets which
2620 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2621 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2623 #ifdef TARGET_MEM_FUNCTIONS
2624 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2626 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2627 TREE_UNSIGNED (integer_type_node));
2628 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2631 #ifdef TARGET_MEM_FUNCTIONS
2632 /* It is incorrect to use the libcall calling conventions to call
2633 memset in this context.
2635 This could be a user call to memset and the user may wish to
2636 examine the return value from memset.
2638 For targets where libcalls and normal calls have different
2639 conventions for returning pointers, we could end up generating
2642 So instead of using a libcall sequence we build up a suitable
2643 CALL_EXPR and expand the call in the normal fashion. */
2644 if (fn == NULL_TREE)
2648 /* This was copied from except.c; I don't know whether all of it is
2649 necessary in this context. */
2650 fn = get_identifier ("memset");
2651 fntype = build_pointer_type (void_type_node);
2652 fntype = build_function_type (fntype, NULL_TREE);
2653 fn = build_decl (FUNCTION_DECL, fn, fntype);
2654 ggc_add_tree_root (&fn, 1);
2655 DECL_EXTERNAL (fn) = 1;
2656 TREE_PUBLIC (fn) = 1;
2657 DECL_ARTIFICIAL (fn) = 1;
2658 make_decl_rtl (fn, NULL_PTR);
2659 assemble_external (fn);
2662 /* We need to make an argument list for the function call.
2664 memset has three arguments: the first is a void * address, the
2665 second an integer with the initialization value, and the last is a
2666 size_t byte count for the copy. */
2667 arg_list
2668 = build_tree_list (NULL_TREE,
2669 make_tree (build_pointer_type (void_type_node),
2670 object));
2671 TREE_CHAIN (arg_list)
2672 = build_tree_list (NULL_TREE,
2673 make_tree (integer_type_node, const0_rtx));
2674 TREE_CHAIN (TREE_CHAIN (arg_list))
2675 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2676 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
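/* For clarity: the list built above corresponds to the source-level
   call memset (OBJECT, 0, SIZE), one TREE_LIST node per argument in
   order.  */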
2678 /* Now we have to build up the CALL_EXPR itself. */
2679 call_expr = build1 (ADDR_EXPR,
2680 build_pointer_type (TREE_TYPE (fn)), fn);
2681 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2682 call_expr, arg_list, NULL_TREE);
2683 TREE_SIDE_EFFECTS (call_expr) = 1;
2685 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2687 emit_library_call (bzero_libfunc, LCT_NORMAL,
2688 VOIDmode, 2, object, Pmode, size,
2689 TYPE_MODE (integer_type_node));
2697 /* Generate code to copy Y into X.
2698 Both Y and X must have the same mode, except that
2699 Y can be a constant with VOIDmode.
2700 This mode cannot be BLKmode; use emit_block_move for that.
2702 Return the last instruction emitted. */
2705 emit_move_insn (x, y)
2708 enum machine_mode mode = GET_MODE (x);
2710 x = protect_from_queue (x, 1);
2711 y = protect_from_queue (y, 0);
2713 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2714 abort ();
2716 /* Never force constant_p_rtx to memory. */
2717 if (GET_CODE (y) == CONSTANT_P_RTX)
2719 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2720 y = force_const_mem (mode, y);
2722 /* If X or Y are memory references, verify that their addresses are valid
2724 if (GET_CODE (x) == MEM
2725 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2726 && ! push_operand (x, GET_MODE (x)))
2728 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2729 x = change_address (x, VOIDmode, XEXP (x, 0));
2731 if (GET_CODE (y) == MEM
2732 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2734 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2735 y = change_address (y, VOIDmode, XEXP (y, 0));
2737 if (mode == BLKmode)
2738 abort ();
2740 return emit_move_insn_1 (x, y);
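/* Typical usage, sketched for illustration (the names are invented):

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   loads the constant 42 into a fresh SImode pseudo, with the queue
   and address legitimization handled here before emit_move_insn_1
   picks the actual move pattern.  */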
2743 /* Low level part of emit_move_insn.
2744 Called just like emit_move_insn, but assumes X and Y
2745 are basically valid. */
2748 emit_move_insn_1 (x, y)
2751 enum machine_mode mode = GET_MODE (x);
2752 enum machine_mode submode;
2753 enum mode_class class = GET_MODE_CLASS (mode);
2756 if (mode >= MAX_MACHINE_MODE)
2757 abort ();
2759 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2761 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2763 /* Expand complex moves by moving real part and imag part, if possible. */
2764 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2765 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2766 * BITS_PER_UNIT),
2767 (class == MODE_COMPLEX_INT
2768 ? MODE_INT : MODE_FLOAT),
2769 0))
2770 && (mov_optab->handlers[(int) submode].insn_code
2771 != CODE_FOR_nothing))
2773 /* Don't split destination if it is a stack push. */
2774 int stack = push_operand (x, GET_MODE (x));
2776 /* If this is a stack, push the highpart first, so it
2777 will be in the argument order.
2779 In that case, change_address is used only to convert
2780 the mode, not to change the address. */
2783 /* Note that the real part always precedes the imag part in memory
2784 regardless of machine's endianness. */
2785 #ifdef STACK_GROWS_DOWNWARD
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2787 (gen_rtx_MEM (submode, XEXP (x, 0)),
2788 gen_imagpart (submode, y)));
2789 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2790 (gen_rtx_MEM (submode, XEXP (x, 0)),
2791 gen_realpart (submode, y)));
2792 #else
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2794 (gen_rtx_MEM (submode, XEXP (x, 0)),
2795 gen_realpart (submode, y)));
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2797 (gen_rtx_MEM (submode, XEXP (x, 0)),
2798 gen_imagpart (submode, y)));
2799 #endif
2803 rtx realpart_x, realpart_y;
2804 rtx imagpart_x, imagpart_y;
2806 /* If this is a complex value with each part being smaller than a
2807 word, the usual calling sequence will likely pack the pieces into
2808 a single register. Unfortunately, SUBREG of hard registers only
2809 deals in terms of words, so we have a problem converting input
2810 arguments to the CONCAT of two registers that is used elsewhere
2811 for complex values. If this is before reload, we can copy it into
2812 memory and reload. FIXME, we should see about using extract and
2813 insert on integer registers, but complex short and complex char
2814 variables should be rarely used. */
2815 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2816 && (reload_in_progress | reload_completed) == 0)
2818 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2819 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2821 if (packed_dest_p || packed_src_p)
2823 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2824 ? MODE_FLOAT : MODE_INT);
2826 enum machine_mode reg_mode
2827 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2829 if (reg_mode != BLKmode)
2831 rtx mem = assign_stack_temp (reg_mode,
2832 GET_MODE_SIZE (mode), 0);
2833 rtx cmem = change_address (mem, mode, NULL_RTX);
2835 cfun->cannot_inline
2836 = N_("function using short complex types cannot be inline");
2840 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2841 emit_move_insn_1 (cmem, y);
2842 return emit_move_insn_1 (sreg, mem);
2846 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2847 emit_move_insn_1 (mem, sreg);
2848 return emit_move_insn_1 (x, cmem);
2854 realpart_x = gen_realpart (submode, x);
2855 realpart_y = gen_realpart (submode, y);
2856 imagpart_x = gen_imagpart (submode, x);
2857 imagpart_y = gen_imagpart (submode, y);
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2863 if (x != y
2864 && ! (reload_in_progress || reload_completed)
2865 && (GET_CODE (realpart_x) == SUBREG
2866 || GET_CODE (imagpart_x) == SUBREG))
2868 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (realpart_x, realpart_y));
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2874 (imagpart_x, imagpart_y));
2877 return get_last_insn ();
2880 /* This will handle any multi-word mode that lacks a move_insn pattern.
2881 However, you will get better code if you define such patterns,
2882 even if they must turn into multiple assembler instructions. */
2883 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2889 #ifdef PUSH_ROUNDING
2891 /* If X is a push on the stack, do the push now and replace
2892 X with a reference to the stack pointer. */
2893 if (push_operand (x, GET_MODE (x)))
2895 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2896 x = change_address (x, VOIDmode, stack_pointer_rtx);
2900 /* If we are in reload, see if either operand is a MEM whose address
2901 is scheduled for replacement. */
2902 if (reload_in_progress && GET_CODE (x) == MEM
2903 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2905 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2907 MEM_COPY_ATTRIBUTES (new, x);
2910 if (reload_in_progress && GET_CODE (y) == MEM
2911 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2913 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2915 MEM_COPY_ATTRIBUTES (new, y);
2923 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2926 rtx xpart = operand_subword (x, i, 1, mode);
2927 rtx ypart = operand_subword (y, i, 1, mode);
2929 /* If we can't get a part of Y, put Y into memory if it is a
2930 constant. Otherwise, force it into a register. If we still
2931 can't get a part of Y, abort. */
2932 if (ypart == 0 && CONSTANT_P (y))
2934 y = force_const_mem (mode, y);
2935 ypart = operand_subword (y, i, 1, mode);
2937 else if (ypart == 0)
2938 ypart = operand_subword_force (y, i, mode);
2940 if (xpart == 0 || ypart == 0)
2941 abort ();
2943 need_clobber |= (GET_CODE (xpart) == SUBREG);
2945 last_insn = emit_move_insn (xpart, ypart);
2948 seq = gen_sequence ();
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2955 if (x != y
2956 && ! (reload_in_progress || reload_completed)
2957 && need_clobber != 0)
2959 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2970 /* Pushing data onto the stack. */
2972 /* Push a block of length SIZE (perhaps variable)
2973 and return an rtx to address the beginning of the block.
2974 Note that it is not possible for the value returned to be a QUEUED.
2975 The value may be virtual_outgoing_args_rtx.
2977 EXTRA is the number of bytes of padding to push in addition to SIZE.
2978 BELOW nonzero means this padding comes at low addresses;
2979 otherwise, the padding comes at high addresses. */
2982 push_block (size, extra, below)
2988 size = convert_modes (Pmode, ptr_mode, size, 1);
2989 if (CONSTANT_P (size))
2990 anti_adjust_stack (plus_constant (size, extra));
2991 else if (GET_CODE (size) == REG && extra == 0)
2992 anti_adjust_stack (size);
2995 temp = copy_to_mode_reg (Pmode, size);
2997 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2998 temp, 0, OPTAB_LIB_WIDEN);
2999 anti_adjust_stack (temp);
3002 #ifndef STACK_GROWS_DOWNWARD
3003 #ifdef ARGS_GROW_DOWNWARD
3004 if (!ACCUMULATE_OUTGOING_ARGS)
3012 /* Return the lowest stack address when STACK or ARGS grow downward and
3013 we are not accumulating outgoing arguments (the c4x port uses such
3014 conventions). */
3015 temp = virtual_outgoing_args_rtx;
3016 if (extra != 0 && below)
3017 temp = plus_constant (temp, extra);
3021 if (GET_CODE (size) == CONST_INT)
3022 temp = plus_constant (virtual_outgoing_args_rtx,
3023 -INTVAL (size) - (below ? 0 : extra));
3024 else if (extra != 0 && !below)
3025 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3026 negate_rtx (Pmode, plus_constant (size, extra)));
3028 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3029 negate_rtx (Pmode, size));
3032 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3035 rtx
3036 gen_push_operand ()
3038 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
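/* Illustration (not in the original source): on a target where the
   stack grows downward this yields (pre_dec:Pmode (reg sp)) by
   default, so wrapping the result in a MEM produces the destination
   of a push, as in
     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100)).  */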
3041 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3042 block of SIZE bytes. */
3045 get_push_address (size)
3050 if (STACK_PUSH_CODE == POST_DEC)
3051 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3052 else if (STACK_PUSH_CODE == POST_INC)
3053 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3055 temp = stack_pointer_rtx;
3057 return copy_to_reg (temp);
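/* Worked example (illustrative): after pushing an 8-byte block with
   STACK_PUSH_CODE == POST_DEC the stack pointer has already moved past
   the block, so its start is sp + 8; with PRE_DEC the stack pointer
   itself addresses the start of the block and is returned as-is.  */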
3060 /* Generate code to push X onto the stack, assuming it has mode MODE and
3061 type TYPE.
3062 MODE is redundant except when X is a CONST_INT (since they don't
3063 carry mode info).
3064 SIZE is an rtx for the size of data to be copied (in bytes),
3065 needed only if X is BLKmode.
3067 ALIGN is maximum alignment we can assume.
3069 If PARTIAL and REG are both nonzero, then copy that many of the first
3070 words of X into registers starting with REG, and push the rest of X.
3071 The amount of space pushed is decreased by PARTIAL words,
3072 rounded *down* to a multiple of PARM_BOUNDARY.
3073 REG must be a hard register in this case.
3074 If REG is zero but PARTIAL is not, take all other actions for an
3075 argument partially in registers, but do not actually load any
3076 registers.
3078 EXTRA is the amount in bytes of extra space to leave next to this arg.
3079 This is ignored if an argument block has already been allocated.
3081 On a machine that lacks real push insns, ARGS_ADDR is the address of
3082 the bottom of the argument block for this call. We use indexing off there
3083 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3084 argument block has not been preallocated.
3086 ARGS_SO_FAR is the size of args previously pushed for this call.
3088 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3089 for arguments passed in registers. If nonzero, it will be the number
3090 of bytes required. */
3093 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3094 args_addr, args_so_far, reg_parm_stack_space,
3097 enum machine_mode mode;
3106 int reg_parm_stack_space;
3110 enum direction stack_direction
3111 #ifdef STACK_GROWS_DOWNWARD
3112 = downward;
3113 #else
3114 = upward;
3115 #endif
3117 /* Decide where to pad the argument: `downward' for below,
3118 `upward' for above, or `none' for don't pad it.
3119 Default is below for small data on big-endian machines; else above. */
3120 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3122 /* Invert direction if stack is post-update. */
3123 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3124 if (where_pad != none)
3125 where_pad = (where_pad == downward ? upward : downward);
3127 xinner = x = protect_from_queue (x, 0);
3129 if (mode == BLKmode)
3131 /* Copy a block into the stack, entirely or partially. */
3134 int used = partial * UNITS_PER_WORD;
3135 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3143 /* USED is now the # of bytes we need not copy to the stack
3144 because registers will take care of them. */
3147 xinner = change_address (xinner, BLKmode,
3148 plus_constant (XEXP (xinner, 0), used));
3150 /* If the partial register-part of the arg counts in its stack size,
3151 skip the part of stack space corresponding to the registers.
3152 Otherwise, start copying to the beginning of the stack space,
3153 by setting SKIP to 0. */
3154 skip = (reg_parm_stack_space == 0) ? 0 : used;
3156 #ifdef PUSH_ROUNDING
3157 /* Do it with several push insns if that doesn't take lots of insns
3158 and if there is no difficulty with push insns that skip bytes
3159 on the stack for alignment purposes. */
3160 if (args_addr == 0
3161 && PUSH_ARGS
3162 && GET_CODE (size) == CONST_INT
3163 && skip == 0
3164 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3165 /* Here we avoid the case of a structure whose weak alignment
3166 forces many pushes of a small amount of data,
3167 and such small pushes do rounding that causes trouble. */
3168 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3169 || align >= BIGGEST_ALIGNMENT
3170 || PUSH_ROUNDING (align) == align)
3171 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3173 /* Push padding now if padding above and stack grows down,
3174 or if padding below and stack grows up.
3175 But if space already allocated, this has already been done. */
3176 if (extra && args_addr == 0
3177 && where_pad != none && where_pad != stack_direction)
3178 anti_adjust_stack (GEN_INT (extra));
3180 stack_pointer_delta += INTVAL (size) - used;
3181 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3182 INTVAL (size) - used, align);
3184 if (current_function_check_memory_usage && ! in_check_memory_usage)
3188 in_check_memory_usage = 1;
3189 temp = get_push_address (INTVAL (size) - used);
3190 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3191 emit_library_call (chkr_copy_bitmap_libfunc,
3192 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3193 Pmode, XEXP (xinner, 0), Pmode,
3194 GEN_INT (INTVAL (size) - used),
3195 TYPE_MODE (sizetype));
3197 emit_library_call (chkr_set_right_libfunc,
3198 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3199 Pmode, GEN_INT (INTVAL (size) - used),
3200 TYPE_MODE (sizetype),
3201 GEN_INT (MEMORY_USE_RW),
3202 TYPE_MODE (integer_type_node));
3203 in_check_memory_usage = 0;
3207 #endif /* PUSH_ROUNDING */
3211 /* Otherwise make space on the stack and copy the data
3212 to the address of that space. */
3214 /* Deduct words put into registers from the size we must copy. */
3217 if (GET_CODE (size) == CONST_INT)
3218 size = GEN_INT (INTVAL (size) - used);
3219 else
3220 size = expand_binop (GET_MODE (size), sub_optab, size,
3221 GEN_INT (used), NULL_RTX, 0,
3225 /* Get the address of the stack space.
3226 In this case, we do not deal with EXTRA separately.
3227 A single stack adjust will do. */
3229 if (! args_addr)
3230 temp = push_block (size, extra, where_pad == downward);
3233 else if (GET_CODE (args_so_far) == CONST_INT)
3234 temp = memory_address (BLKmode,
3235 plus_constant (args_addr,
3236 skip + INTVAL (args_so_far)));
3238 temp = memory_address (BLKmode,
3239 plus_constant (gen_rtx_PLUS (Pmode,
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3245 in_check_memory_usage = 1;
3246 target = copy_to_reg (temp);
3247 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3248 emit_library_call (chkr_copy_bitmap_libfunc,
3249 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3251 XEXP (xinner, 0), Pmode,
3252 size, TYPE_MODE (sizetype));
3254 emit_library_call (chkr_set_right_libfunc,
3255 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3257 size, TYPE_MODE (sizetype),
3258 GEN_INT (MEMORY_USE_RW),
3259 TYPE_MODE (integer_type_node));
3260 in_check_memory_usage = 0;
3263 target = gen_rtx_MEM (BLKmode, temp);
3267 set_mem_attributes (target, type, 1);
3268 /* Function incoming arguments may overlap with sibling call
3269 outgoing arguments and we cannot allow reordering of reads
3270 from function arguments with stores to outgoing arguments
3271 of sibling calls. */
3272 MEM_ALIAS_SET (target) = 0;
3275 /* TEMP is the address of the block. Copy the data there. */
3276 if (GET_CODE (size) == CONST_INT
3277 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3279 move_by_pieces (target, xinner, INTVAL (size), align);
3284 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3285 enum machine_mode mode;
3287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3289 mode = GET_MODE_WIDER_MODE (mode))
3291 enum insn_code code = movstr_optab[(int) mode];
3292 insn_operand_predicate_fn pred;
3294 if (code != CODE_FOR_nothing
3295 && ((GET_CODE (size) == CONST_INT
3296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3297 <= (GET_MODE_MASK (mode) >> 1)))
3298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3299 && (!(pred = insn_data[(int) code].operand[0].predicate)
3300 || ((*pred) (target, BLKmode)))
3301 && (!(pred = insn_data[(int) code].operand[1].predicate)
3302 || ((*pred) (xinner, BLKmode)))
3303 && (!(pred = insn_data[(int) code].operand[3].predicate)
3304 || ((*pred) (opalign, VOIDmode))))
3306 rtx op2 = convert_to_mode (mode, size, 1);
3307 rtx last = get_last_insn ();
3310 pred = insn_data[(int) code].operand[2].predicate;
3311 if (pred != 0 && ! (*pred) (op2, mode))
3312 op2 = copy_to_mode_reg (mode, op2);
3314 pat = GEN_FCN ((int) code) (target, xinner,
3322 delete_insns_since (last);
3327 if (!ACCUMULATE_OUTGOING_ARGS)
3329 /* If the source is referenced relative to the stack pointer,
3330 copy it to another register to stabilize it. We do not need
3331 to do this if we know that we won't be changing sp. */
3333 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3334 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3335 temp = copy_to_reg (temp);
3338 /* Make inhibit_defer_pop nonzero around the library call
3339 to force it to pop the bcopy-arguments right away. */
3341 #ifdef TARGET_MEM_FUNCTIONS
3342 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3343 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3344 convert_to_mode (TYPE_MODE (sizetype),
3345 size, TREE_UNSIGNED (sizetype)),
3346 TYPE_MODE (sizetype));
3348 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3349 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3350 convert_to_mode (TYPE_MODE (integer_type_node),
3352 TREE_UNSIGNED (integer_type_node)),
3353 TYPE_MODE (integer_type_node));
3358 else if (partial > 0)
3360 /* Scalar partly in registers. */
3362 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3365 /* # words of start of argument
3366 that we must make space for but need not store. */
3367 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3368 int args_offset = INTVAL (args_so_far);
3371 /* Push padding now if padding above and stack grows down,
3372 or if padding below and stack grows up.
3373 But if space already allocated, this has already been done. */
3374 if (extra && args_addr == 0
3375 && where_pad != none && where_pad != stack_direction)
3376 anti_adjust_stack (GEN_INT (extra));
3378 /* If we make space by pushing it, we might as well push
3379 the real data. Otherwise, we can leave OFFSET nonzero
3380 and leave the space uninitialized. */
3384 /* Now NOT_STACK gets the number of words that we don't need to
3385 allocate on the stack. */
3386 not_stack = partial - offset;
3388 /* If the partial register-part of the arg counts in its stack size,
3389 skip the part of stack space corresponding to the registers.
3390 Otherwise, start copying to the beginning of the stack space,
3391 by setting SKIP to 0. */
3392 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3394 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3395 x = validize_mem (force_const_mem (mode, x));
3397 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3398 SUBREGs of such registers are not allowed. */
3399 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3400 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3401 x = copy_to_reg (x);
3403 /* Loop over all the words allocated on the stack for this arg. */
3404 /* We can do it by words, because any scalar bigger than a word
3405 has a size a multiple of a word. */
3406 #ifndef PUSH_ARGS_REVERSED
3407 for (i = not_stack; i < size; i++)
3408 #else
3409 for (i = size - 1; i >= not_stack; i--)
3410 #endif
3411 if (i >= not_stack + offset)
3412 emit_push_insn (operand_subword_force (x, i, mode),
3413 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3415 GEN_INT (args_offset + ((i - not_stack + skip)
3417 reg_parm_stack_space, alignment_pad);
3422 rtx target = NULL_RTX;
3425 /* Push padding now if padding above and stack grows down,
3426 or if padding below and stack grows up.
3427 But if space already allocated, this has already been done. */
3428 if (extra && args_addr == 0
3429 && where_pad != none && where_pad != stack_direction)
3430 anti_adjust_stack (GEN_INT (extra));
3432 #ifdef PUSH_ROUNDING
3433 if (args_addr == 0 && PUSH_ARGS)
3435 addr = gen_push_operand ();
3436 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3441 if (GET_CODE (args_so_far) == CONST_INT)
3442 addr
3443 = memory_address (mode,
3444 plus_constant (args_addr,
3445 INTVAL (args_so_far)));
3446 else
3447 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3452 dest = gen_rtx_MEM (mode, addr);
3455 set_mem_attributes (dest, type, 1);
3456 /* Function incoming arguments may overlap with sibling call
3457 outgoing arguments and we cannot allow reordering of reads
3458 from function arguments with stores to outgoing arguments
3459 of sibling calls. */
3460 MEM_ALIAS_SET (dest) = 0;
3463 emit_move_insn (dest, x);
3465 if (current_function_check_memory_usage && ! in_check_memory_usage)
3467 in_check_memory_usage = 1;
3469 target = get_push_address (GET_MODE_SIZE (mode));
3471 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3472 emit_library_call (chkr_copy_bitmap_libfunc,
3473 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3474 Pmode, XEXP (x, 0), Pmode,
3475 GEN_INT (GET_MODE_SIZE (mode)),
3476 TYPE_MODE (sizetype));
3478 emit_library_call (chkr_set_right_libfunc,
3479 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3480 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3481 TYPE_MODE (sizetype),
3482 GEN_INT (MEMORY_USE_RW),
3483 TYPE_MODE (integer_type_node));
3484 in_check_memory_usage = 0;
3489 /* If part should go in registers, copy that part
3490 into the appropriate registers. Do this now, at the end,
3491 since mem-to-mem copies above may do function calls. */
3492 if (partial > 0 && reg != 0)
3494 /* Handle calls that pass values in multiple non-contiguous locations.
3495 The Irix 6 ABI has examples of this. */
3496 if (GET_CODE (reg) == PARALLEL)
3497 emit_group_load (reg, x, -1, align); /* ??? size? */
3499 move_block_to_reg (REGNO (reg), x, partial, mode);
3502 if (extra && args_addr == 0 && where_pad == stack_direction)
3503 anti_adjust_stack (GEN_INT (extra));
3505 if (alignment_pad && args_addr == 0)
3506 anti_adjust_stack (alignment_pad);
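/* Illustrative example of the padding logic above (not from the
   original source): with EXTRA == 2 on a downward-growing stack whose
   arguments pad upward, the padding lies above the data, so the stack
   is adjusted for EXTRA before the data is pushed; when the padding
   direction matches the stack direction, the adjustment is instead
   made after the push, just before ALIGNMENT_PAD is applied.  */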
3509 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3510 operations. */
3512 static rtx
3513 get_subtarget (x)
3516 return ((x == 0
3517 /* Only registers can be subtargets. */
3518 || GET_CODE (x) != REG
3519 /* If the register is readonly, it can't be set more than once. */
3520 || RTX_UNCHANGING_P (x)
3521 /* Don't use hard regs to avoid extending their life. */
3522 || REGNO (x) < FIRST_PSEUDO_REGISTER
3523 /* Avoid subtargets inside loops,
3524 since they hide some invariant expressions. */
3525 || preserve_subexpressions_p ())
3526 ? 0 : x);
3529 /* Expand an assignment that stores the value of FROM into TO.
3530 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3531 (This may contain a QUEUED rtx;
3532 if the value is constant, this rtx is a constant.)
3533 Otherwise, the returned value is NULL_RTX.
3535 SUGGEST_REG is no longer actually used.
3536 It used to mean, copy the value through a register
3537 and return that register, if that is possible.
3538 We now use WANT_VALUE to decide whether to do this. */
3541 expand_assignment (to, from, want_value, suggest_reg)
3544 int suggest_reg ATTRIBUTE_UNUSED;
3546 register rtx to_rtx = 0;
3549 /* Don't crash if the lhs of the assignment was erroneous. */
3551 if (TREE_CODE (to) == ERROR_MARK)
3553 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3554 return want_value ? result : NULL_RTX;
3557 /* Assignment of a structure component needs special treatment
3558 if the structure component's rtx is not simply a MEM.
3559 Assignment of an array element at a constant index, and assignment of
3560 an array element in an unaligned packed structure field, has the same
3561 problem. */
3563 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3564 || TREE_CODE (to) == ARRAY_REF)
3566 enum machine_mode mode1;
3567 HOST_WIDE_INT bitsize, bitpos;
3572 unsigned int alignment;
3575 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3576 &unsignedp, &volatilep, &alignment);
3578 /* If we are going to use store_bit_field and extract_bit_field,
3579 make sure to_rtx will be safe for multiple use. */
3581 if (mode1 == VOIDmode && want_value)
3582 tem = stabilize_reference (tem);
3584 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3587 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3589 if (GET_CODE (to_rtx) != MEM)
3590 abort ();
3592 if (GET_MODE (offset_rtx) != ptr_mode)
3594 #ifdef POINTERS_EXTEND_UNSIGNED
3595 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3597 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3601 /* A constant address in TO_RTX can have VOIDmode, we must not try
3602 to call force_reg for that case. Avoid that case. */
3603 if (GET_CODE (to_rtx) == MEM
3604 && GET_MODE (to_rtx) == BLKmode
3605 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3607 && (bitpos % bitsize) == 0
3608 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3609 && alignment == GET_MODE_ALIGNMENT (mode1))
3611 rtx temp = change_address (to_rtx, mode1,
3612 plus_constant (XEXP (to_rtx, 0),
3615 if (GET_CODE (XEXP (temp, 0)) == REG)
3616 to_rtx = temp;
3617 else
3618 to_rtx = change_address (to_rtx, mode1,
3619 force_reg (GET_MODE (XEXP (temp, 0)),
3624 to_rtx = change_address (to_rtx, VOIDmode,
3625 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3626 force_reg (ptr_mode,
3632 if (GET_CODE (to_rtx) == MEM)
3634 /* When the offset is zero, to_rtx is the address of the
3635 structure we are storing into, and hence may be shared.
3636 We must make a new MEM before setting the volatile bit. */
3638 to_rtx = copy_rtx (to_rtx);
3640 MEM_VOLATILE_P (to_rtx) = 1;
3642 #if 0 /* This was turned off because, when a field is volatile
3643 in an object which is not volatile, the object may be in a register,
3644 and then we would abort over here. */
3650 if (TREE_CODE (to) == COMPONENT_REF
3651 && TREE_READONLY (TREE_OPERAND (to, 1)))
3654 to_rtx = copy_rtx (to_rtx);
3656 RTX_UNCHANGING_P (to_rtx) = 1;
3659 /* Check the access. */
3660 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3665 enum machine_mode best_mode;
3667 best_mode = get_best_mode (bitsize, bitpos,
3668 TYPE_ALIGN (TREE_TYPE (tem)),
3670 if (best_mode == VOIDmode)
3671 best_mode = QImode;
3673 best_mode_size = GET_MODE_BITSIZE (best_mode);
3674 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3675 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3676 size *= GET_MODE_SIZE (best_mode);
3678 /* Check the access right of the pointer. */
3679 in_check_memory_usage = 1;
3681 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3682 VOIDmode, 3, to_addr, Pmode,
3683 GEN_INT (size), TYPE_MODE (sizetype),
3684 GEN_INT (MEMORY_USE_WO),
3685 TYPE_MODE (integer_type_node));
3686 in_check_memory_usage = 0;
3689 /* If this is a varying-length object, we must get the address of
3690 the source and do an explicit block move. */
3693 unsigned int from_align;
3694 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3695 rtx inner_to_rtx
3696 = change_address (to_rtx, VOIDmode,
3697 plus_constant (XEXP (to_rtx, 0),
3698 bitpos / BITS_PER_UNIT));
3700 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3701 MIN (alignment, from_align));
3708 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3710 /* Spurious cast for HPUX compiler. */
3711 ? ((enum machine_mode)
3712 TYPE_MODE (TREE_TYPE (to)))
3716 int_size_in_bytes (TREE_TYPE (tem)),
3717 get_alias_set (to));
3719 preserve_temp_slots (result);
3723 /* If the value is meaningful, convert RESULT to the proper mode.
3724 Otherwise, return nothing. */
3725 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3726 TYPE_MODE (TREE_TYPE (from)),
3728 TREE_UNSIGNED (TREE_TYPE (to)))
3733 /* If the rhs is a function call and its value is not an aggregate,
3734 call the function before we start to compute the lhs.
3735 This is needed for correct code for cases such as
3736 val = setjmp (buf) on machines where reference to val
3737 requires loading up part of an address in a separate insn.
3739 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3740 since it might be a promoted variable where the zero- or sign- extension
3741 needs to be done. Handling this in the normal way is safe because no
3742 computation is done before the call. */
3743 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3744 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3745 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3746 && GET_CODE (DECL_RTL (to)) == REG))
3751 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3753 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3755 /* Handle calls that return values in multiple non-contiguous locations.
3756 The Irix 6 ABI has examples of this. */
3757 if (GET_CODE (to_rtx) == PARALLEL)
3758 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3759 TYPE_ALIGN (TREE_TYPE (from)));
3760 else if (GET_MODE (to_rtx) == BLKmode)
3761 emit_block_move (to_rtx, value, expr_size (from),
3762 TYPE_ALIGN (TREE_TYPE (from)));
3765 #ifdef POINTERS_EXTEND_UNSIGNED
3766 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3767 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3768 value = convert_memory_address (GET_MODE (to_rtx), value);
3770 emit_move_insn (to_rtx, value);
3772 preserve_temp_slots (to_rtx);
3775 return want_value ? to_rtx : NULL_RTX;
3778 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3779 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3783 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3784 if (GET_CODE (to_rtx) == MEM)
3785 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3788 /* Don't move directly into a return register. */
3789 if (TREE_CODE (to) == RESULT_DECL
3790 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3795 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3797 if (GET_CODE (to_rtx) == PARALLEL)
3798 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3799 TYPE_ALIGN (TREE_TYPE (from)));
3801 emit_move_insn (to_rtx, temp);
3803 preserve_temp_slots (to_rtx);
3806 return want_value ? to_rtx : NULL_RTX;
3809 /* In case we are returning the contents of an object which overlaps
3810 the place the value is being stored, use a safe function when copying
3811 a value through a pointer into a structure value return block. */
3812 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3813 && current_function_returns_struct
3814 && !current_function_returns_pcc_struct)
3819 size = expr_size (from);
3820 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3821 EXPAND_MEMORY_USE_DONT);
3823 /* Copy the rights of the bitmap. */
3824 if (current_function_check_memory_usage)
3825 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3826 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3827 XEXP (from_rtx, 0), Pmode,
3828 convert_to_mode (TYPE_MODE (sizetype),
3829 size, TREE_UNSIGNED (sizetype)),
3830 TYPE_MODE (sizetype));
3832 #ifdef TARGET_MEM_FUNCTIONS
3833 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3834 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3835 XEXP (from_rtx, 0), Pmode,
3836 convert_to_mode (TYPE_MODE (sizetype),
3837 size, TREE_UNSIGNED (sizetype)),
3838 TYPE_MODE (sizetype));
3840 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3841 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3842 XEXP (to_rtx, 0), Pmode,
3843 convert_to_mode (TYPE_MODE (integer_type_node),
3844 size, TREE_UNSIGNED (integer_type_node)),
3845 TYPE_MODE (integer_type_node));
3848 preserve_temp_slots (to_rtx);
3851 return want_value ? to_rtx : NULL_RTX;
3854 /* Compute FROM and store the value in the rtx we got. */
3857 result = store_expr (from, to_rtx, want_value);
3858 preserve_temp_slots (result);
3861 return want_value ? result : NULL_RTX;
3864 /* Generate code for computing expression EXP,
3865 and storing the value into TARGET.
3866 TARGET may contain a QUEUED rtx.
3868 If WANT_VALUE is nonzero, return a copy of the value
3869 not in TARGET, so that we can be sure to use the proper
3870 value in a containing expression even if TARGET has something
3871 else stored in it. If possible, we copy the value through a pseudo
3872 and return that pseudo. Or, if the value is constant, we try to
3873 return the constant. In some cases, we return a pseudo
3874 copied *from* TARGET.
3876 If the mode is BLKmode then we may return TARGET itself.
3877 It turns out that in BLKmode it doesn't cause a problem,
3878 because C has no operators that could combine two different
3879 assignments into the same BLKmode object with different values
3880 with no sequence point. Will other languages need this to
3881 be more thorough?
3883 If WANT_VALUE is 0, we return NULL, to make sure
3884 to catch quickly any cases where the caller uses the value
3885 and fails to set WANT_VALUE. */
3888 store_expr (exp, target, want_value)
3890 register rtx target;
3894 int dont_return_target = 0;
3896 if (TREE_CODE (exp) == COMPOUND_EXPR)
3898 /* Perform first part of compound expression, then assign from second
3900 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3902 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3904 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3906 /* For conditional expression, get safe form of the target. Then
3907 test the condition, doing the appropriate assignment on either
3908 side. This avoids the creation of unnecessary temporaries.
3909 For non-BLKmode, it is more efficient not to do this. */
3911 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3914 target = protect_from_queue (target, 1);
3916 do_pending_stack_adjust ();
3918 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3919 start_cleanup_deferral ();
3920 store_expr (TREE_OPERAND (exp, 1), target, 0);
3921 end_cleanup_deferral ();
3923 emit_jump_insn (gen_jump (lab2));
3926 start_cleanup_deferral ();
3927 store_expr (TREE_OPERAND (exp, 2), target, 0);
3928 end_cleanup_deferral ();
3933 return want_value ? target : NULL_RTX;
3935 else if (queued_subexp_p (target))
3936 /* If target contains a postincrement, let's not risk
3937 using it as the place to generate the rhs. */
3939 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3941 /* Expand EXP into a new pseudo. */
3942 temp = gen_reg_rtx (GET_MODE (target));
3943 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3946 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3948 /* If target is volatile, ANSI requires accessing the value
3949 *from* the target, if it is accessed. So make that happen.
3950 In no case return the target itself. */
3951 if (! MEM_VOLATILE_P (target) && want_value)
3952 dont_return_target = 1;
3954 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3955 && GET_MODE (target) != BLKmode)
3956 /* If target is in memory and caller wants value in a register instead,
3957 arrange that. Pass TARGET as target for expand_expr so that,
3958 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3959 We know expand_expr will not use the target in that case.
3960 Don't do this if TARGET is volatile because we are supposed
3961 to write it and then read it. */
3963 temp = expand_expr (exp, target, GET_MODE (target), 0);
3964 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3965 temp = copy_to_reg (temp);
3966 dont_return_target = 1;
3968 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3969 /* If this is a scalar in a register that is stored in a wider mode
3970 than the declared mode, compute the result into its declared mode
3971 and then convert to the wider mode. Our value is the computed
3972 expression. */
3974 /* If we don't want a value, we can do the conversion inside EXP,
3975 which will often result in some optimizations. Do the conversion
3976 in two steps: first change the signedness, if needed, then
3977 the extend. But don't do this if the type of EXP is a subtype
3978 of something else since then the conversion might involve
3979 more than just converting modes. */
3980 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3981 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3983 if (TREE_UNSIGNED (TREE_TYPE (exp))
3984 != SUBREG_PROMOTED_UNSIGNED_P (target))
3985 exp
3986 = convert
3987 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3988 TREE_TYPE (exp)),
3989 exp);
3991 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3992 SUBREG_PROMOTED_UNSIGNED_P (target)),
3996 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3998 /* If TEMP is a volatile MEM and we want a result value, make
3999 the access now so it gets done only once. Likewise if
4000 it contains TARGET. */
4001 if (GET_CODE (temp) == MEM && want_value
4002 && (MEM_VOLATILE_P (temp)
4003 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4004 temp = copy_to_reg (temp);
4006 /* If TEMP is a VOIDmode constant, use convert_modes to make
4007 sure that we properly convert it. */
4008 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4009 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4010 TYPE_MODE (TREE_TYPE (exp)), temp,
4011 SUBREG_PROMOTED_UNSIGNED_P (target));
4013 convert_move (SUBREG_REG (target), temp,
4014 SUBREG_PROMOTED_UNSIGNED_P (target));
4016 /* If we promoted a constant, change the mode back down to match
4017 target. Otherwise, the caller might get confused by a result whose
4018 mode is larger than expected. */
4020 if (want_value && GET_MODE (temp) != GET_MODE (target)
4021 && GET_MODE (temp) != VOIDmode)
4023 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4024 SUBREG_PROMOTED_VAR_P (temp) = 1;
4025 SUBREG_PROMOTED_UNSIGNED_P (temp)
4026 = SUBREG_PROMOTED_UNSIGNED_P (target);
4029 return want_value ? temp : NULL_RTX;
4033 temp = expand_expr (exp, target, GET_MODE (target), 0);
4034 /* Return TARGET if it's a specified hardware register.
4035 If TARGET is a volatile mem ref, either return TARGET
4036 or return a reg copied *from* TARGET; ANSI requires this.
4038 Otherwise, if TEMP is not TARGET, return TEMP
4039 if it is constant (for efficiency),
4040 or if we really want the correct value. */
4041 if (!(target && GET_CODE (target) == REG
4042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4044 && ! rtx_equal_p (temp, target)
4045 && (CONSTANT_P (temp) || want_value))
4046 dont_return_target = 1;
4049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4050 the same as that of TARGET, adjust the constant. This is needed, for
4051 example, in case it is a CONST_DOUBLE and we want only a word-sized
4053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4054 && TREE_CODE (exp) != ERROR_MARK
4055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4059 if (current_function_check_memory_usage
4060 && GET_CODE (target) == MEM
4061 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4063 in_check_memory_usage = 1;
4064 if (GET_CODE (temp) == MEM)
4065 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4066 VOIDmode, 3, XEXP (target, 0), Pmode,
4067 XEXP (temp, 0), Pmode,
4068 expr_size (exp), TYPE_MODE (sizetype));
4070 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4071 VOIDmode, 3, XEXP (target, 0), Pmode,
4072 expr_size (exp), TYPE_MODE (sizetype),
4073 GEN_INT (MEMORY_USE_WO),
4074 TYPE_MODE (integer_type_node));
4075 in_check_memory_usage = 0;
4078 /* If value was not generated in the target, store it there.
4079 Convert the value to TARGET's type first if necessary. */
4080 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4081 one or both of them are volatile memory refs, we have to distinguish
4083 - expand_expr has used TARGET. In this case, we must not generate
4084 another copy. This can be detected by TARGET being equal according
4086 - expand_expr has not used TARGET - that means that the source just
4087 happens to have the same RTX form. Since temp will have been created
4088 by expand_expr, it will compare unequal according to == .
4089 We must generate a copy in this case, to reach the correct number
4090 of volatile memory references. */
4092 if ((! rtx_equal_p (temp, target)
4093 || (temp != target && (side_effects_p (temp)
4094 || side_effects_p (target))))
4095 && TREE_CODE (exp) != ERROR_MARK)
4097 target = protect_from_queue (target, 1);
4098 if (GET_MODE (temp) != GET_MODE (target)
4099 && GET_MODE (temp) != VOIDmode)
4101 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4102 if (dont_return_target)
4104 /* In this case, we will return TEMP,
4105 so make sure it has the proper mode.
4106 But don't forget to store the value into TARGET. */
4107 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4108 emit_move_insn (target, temp);
4111 convert_move (target, temp, unsignedp);
4114 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4116 /* Handle copying a string constant into an array.
4117 The string constant may be shorter than the array.
4118 So copy just the string's actual length, and clear the rest. */
4122 /* Get the size of the data type of the string,
4123 which is actually the size of the target. */
4124 size = expr_size (exp);
4125 if (GET_CODE (size) == CONST_INT
4126 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4127 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4130 /* Compute the size of the data to copy from the string. */
4131 tree copy_size
4132 = size_binop (MIN_EXPR,
4133 make_tree (sizetype, size),
4134 size_int (TREE_STRING_LENGTH (exp)));
4135 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4136 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4137 VOIDmode, 0);
4138 rtx label = 0;
4140 /* Copy that much. */
4141 emit_block_move (target, temp, copy_size_rtx,
4142 TYPE_ALIGN (TREE_TYPE (exp)));
4144 /* Figure out how much is left in TARGET that we have to clear.
4145 Do all calculations in ptr_mode. */
4147 addr = XEXP (target, 0);
4148 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4150 if (GET_CODE (copy_size_rtx) == CONST_INT)
4152 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4153 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4154 align = MIN (align, (BITS_PER_UNIT
4155 * (INTVAL (copy_size_rtx)
4156 & - INTVAL (copy_size_rtx))));
4160 addr = force_reg (ptr_mode, addr);
4161 addr = expand_binop (ptr_mode, add_optab, addr,
4162 copy_size_rtx, NULL_RTX, 0,
4165 size = expand_binop (ptr_mode, sub_optab, size,
4166 copy_size_rtx, NULL_RTX, 0,
4169 align = BITS_PER_UNIT;
4170 label = gen_label_rtx ();
4171 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4172 GET_MODE (size), 0, 0, label);
4174 align = MIN (align, expr_align (copy_size));
4176 if (size != const0_rtx)
4178 rtx dest = gen_rtx_MEM (BLKmode, addr);
4180 MEM_COPY_ATTRIBUTES (dest, target);
4182 /* Be sure we can write on ADDR. */
4183 in_check_memory_usage = 1;
4184 if (current_function_check_memory_usage)
4185 emit_library_call (chkr_check_addr_libfunc,
4186 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4188 size, TYPE_MODE (sizetype),
4189 GEN_INT (MEMORY_USE_WO),
4190 TYPE_MODE (integer_type_node));
4191 in_check_memory_usage = 0;
4192 clear_storage (dest, size, align);
4199 /* Handle calls that return values in multiple non-contiguous locations.
4200 The Irix 6 ABI has examples of this. */
4201 else if (GET_CODE (target) == PARALLEL)
4202 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4203 TYPE_ALIGN (TREE_TYPE (exp)));
4204 else if (GET_MODE (temp) == BLKmode)
4205 emit_block_move (target, temp, expr_size (exp),
4206 TYPE_ALIGN (TREE_TYPE (exp)));
4208 emit_move_insn (target, temp);
4211 /* If we don't want a value, return NULL_RTX. */
4212 else if (! want_value)
4213 return NULL_RTX;
4215 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4216 ??? The latter test doesn't seem to make sense. */
4217 else if (dont_return_target && GET_CODE (temp) != MEM)
4218 return temp;
4220 /* Return TARGET itself if it is a hard register. */
4221 else if (want_value && GET_MODE (target) != BLKmode
4222 && ! (GET_CODE (target) == REG
4223 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4224 return copy_to_reg (target);
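/* Illustrative sketch, not part of the original source: the
   copy-then-clear strategy store_expr uses above for a STRING_CST
   that may be shorter than its target, reduced to plain C. Here
   `dst_size' plays the role of expr_size and `src_len' the role of
   TREE_STRING_LENGTH; the helper name is hypothetical, and memcpy
   and memset are assumed declared (e.g. via <string.h>). */

static void
example_store_string (char *dst, unsigned long dst_size,
                      const char *src, unsigned long src_len)
{
  /* Copy only as much as both the string and the target cover. */
  unsigned long copy_len = src_len < dst_size ? src_len : dst_size;

  memcpy (dst, src, copy_len);

  /* Whatever is left in DST past the string must be cleared, just as
     the code above emits clear_storage for the tail of TARGET. */
  if (copy_len < dst_size)
    memset (dst + copy_len, 0, dst_size - copy_len);
}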
4230 /* Return 1 if EXP just contains zeros. */
4238 switch (TREE_CODE (exp))
4242 case NON_LVALUE_EXPR:
4243 return is_zeros_p (TREE_OPERAND (exp, 0));
4246 return integer_zerop (exp);
4250 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4253 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4256 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4257 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4258 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4259 if (! is_zeros_p (TREE_VALUE (elt)))
4269 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4272 mostly_zeros_p (exp)
4275 if (TREE_CODE (exp) == CONSTRUCTOR)
4277 int elts = 0, zeros = 0;
4278 tree elt = CONSTRUCTOR_ELTS (exp);
4279 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4281 /* If there are no ranges of true bits, it is all zero. */
4282 return elt == NULL_TREE;
4284 for (; elt; elt = TREE_CHAIN (elt))
4286 /* We do not handle the case where the index is a RANGE_EXPR,
4287 so the statistic will be somewhat inaccurate.
4288 We do make a more accurate count in store_constructor itself,
4289 and since this function is only used for nested array elements,
4290 this should be close enough. */
4291 if (mostly_zeros_p (TREE_VALUE (elt)))
4296 return 4 * zeros >= 3 * elts;
4299 return is_zeros_p (exp);
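/* Illustrative sketch, not part of the original source: the 3/4
   threshold test used by mostly_zeros_p, applied to a plain array of
   flags where flag[i] is nonzero when element i is mostly zero. The
   comparison 4 * zeros >= 3 * elts asks "are at least three quarters
   of the elements zero?" without any division. The helper name is
   hypothetical. */

static int
example_mostly_zeros (const int *flag, int elts)
{
  int i, zeros = 0;

  for (i = 0; i < elts; i++)
    if (flag[i])
      zeros++;

  /* Integer equivalent of zeros / (double) elts >= 0.75. */
  return 4 * zeros >= 3 * elts;
}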
4302 /* Helper function for store_constructor.
4303 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4304 TYPE is the type of the CONSTRUCTOR, not the element type.
4305 ALIGN and CLEARED are as for store_constructor.
4306 ALIAS_SET is the alias set to use for any stores.
4308 This provides a recursive shortcut back to store_constructor when it isn't
4309 necessary to go through store_field. This is so that we can pass through
4310 the cleared field to let store_constructor know that we may not have to
4311 clear a substructure if the outer structure has already been cleared. */
4314 store_constructor_field (target, bitsize, bitpos,
4315 mode, exp, type, align, cleared, alias_set)
4317 unsigned HOST_WIDE_INT bitsize;
4318 HOST_WIDE_INT bitpos;
4319 enum machine_mode mode;
4325 if (TREE_CODE (exp) == CONSTRUCTOR
4326 && bitpos % BITS_PER_UNIT == 0
4327 /* If we have a non-zero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyway. */
4330 && (bitpos == 0 || GET_CODE (target) == MEM))
4334 = change_address (target,
4335 GET_MODE (target) == BLKmode
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode,
4339 plus_constant (XEXP (target, 0),
4340 bitpos / BITS_PER_UNIT));
4343 /* Show that the alignment may no longer be what it was and update the alias
4344 set, if required. */
4346 align = MIN (align, bitpos & - bitpos);
4347 if (GET_CODE (target) == MEM)
4348 MEM_ALIAS_SET (target) = alias_set;
4350 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4353 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4354 int_size_in_bytes (type), alias_set);
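/* Illustrative sketch, not part of the original source: the alignment
   update `align = MIN (align, bitpos & - bitpos)' seen above.
   `bitpos & - bitpos' isolates the lowest set bit of BITPOS, i.e. the
   largest power of two dividing it, which bounds the alignment that
   can still be assumed at that bit offset. The helper name is
   hypothetical. */

static unsigned int
example_align_at_bitpos (unsigned int align, unsigned int bitpos)
{
  if (bitpos != 0)
    {
      /* Largest power of two that divides BITPOS. */
      unsigned int pot = bitpos & - bitpos;

      if (pot < align)
        align = pot;
    }

  /* A zero bit offset tells us nothing new, so ALIGN is unchanged. */
  return align;
}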
4357 /* Store the value of constructor EXP into the rtx TARGET.
4358 TARGET is either a REG or a MEM.
4359 ALIGN is the maximum known alignment for TARGET.
4360 CLEARED is true if TARGET is known to have been zero'd.
4361 SIZE is the number of bytes of TARGET we are allowed to modify: this
4362 may not be the same as the size of EXP if we are assigning to a field
4363 which has been packed to exclude padding bits. */
4366 store_constructor (exp, target, align, cleared, size)
4373 tree type = TREE_TYPE (exp);
4374 #ifdef WORD_REGISTER_OPERATIONS
4375 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4378 /* We know our target cannot conflict, since safe_from_p has been called. */
4380 /* Don't try copying piece by piece into a hard register
4381 since that is vulnerable to being clobbered by EXP.
4382 Instead, construct in a pseudo register and then copy it all. */
4383 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4385 rtx temp = gen_reg_rtx (GET_MODE (target));
4386 store_constructor (exp, temp, align, cleared, size);
4387 emit_move_insn (target, temp);
4392 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4393 || TREE_CODE (type) == QUAL_UNION_TYPE)
4397 /* Inform later passes that the whole union value is dead. */
4398 if ((TREE_CODE (type) == UNION_TYPE
4399 || TREE_CODE (type) == QUAL_UNION_TYPE)
4402 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4404 /* If the constructor is empty, clear the union. */
4405 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4406 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4409 /* If we are building a static constructor into a register,
4410 set the initial value as zero so we can fold the value into
4411 a constant. But if more than one register is involved,
4412 this probably loses. */
4413 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4414 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4417 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4422 /* If the constructor has fewer fields than the structure
4423 or if we are initializing the structure to mostly zeros,
4424 clear the whole structure first. Don't do this if TARGET is a
4425 register whose mode size isn't equal to SIZE, since clear_storage
4426 can't handle this case. */
4428 && ((list_length (CONSTRUCTOR_ELTS (exp))
4429 != fields_length (type))
4430 || mostly_zeros_p (exp))
4431 && (GET_CODE (target) != REG
4432 || GET_MODE_SIZE (GET_MODE (target)) == size))
4435 clear_storage (target, GEN_INT (size), align);
4440 /* Inform later passes that the old value is dead. */
4441 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4443 /* Store each element of the constructor into
4444 the corresponding field of TARGET. */
4446 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4448 register tree field = TREE_PURPOSE (elt);
4449 #ifdef WORD_REGISTER_OPERATIONS
4450 tree value = TREE_VALUE (elt);
4452 register enum machine_mode mode;
4453 HOST_WIDE_INT bitsize;
4454 HOST_WIDE_INT bitpos = 0;
4457 rtx to_rtx = target;
4459 /* Just ignore missing fields.
4460 We cleared the whole structure, above,
4461 if any fields are missing. */
4465 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4468 if (host_integerp (DECL_SIZE (field), 1))
4469 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4473 unsignedp = TREE_UNSIGNED (field);
4474 mode = DECL_MODE (field);
4475 if (DECL_BIT_FIELD (field))
4478 offset = DECL_FIELD_OFFSET (field);
4479 if (host_integerp (offset, 0)
4480 && host_integerp (bit_position (field), 0))
4482 bitpos = int_bit_position (field);
4486 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4492 if (contains_placeholder_p (offset))
4493 offset = build (WITH_RECORD_EXPR, sizetype,
4494 offset, make_tree (TREE_TYPE (exp), target));
4496 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4497 if (GET_CODE (to_rtx) != MEM)
4500 if (GET_MODE (offset_rtx) != ptr_mode)
4502 #ifdef POINTERS_EXTEND_UNSIGNED
4503 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4505 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4510 = change_address (to_rtx, VOIDmode,
4511 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4512 force_reg (ptr_mode,
4514 align = DECL_OFFSET_ALIGN (field);
4517 if (TREE_READONLY (field))
4519 if (GET_CODE (to_rtx) == MEM)
4520 to_rtx = copy_rtx (to_rtx);
4522 RTX_UNCHANGING_P (to_rtx) = 1;
4525 #ifdef WORD_REGISTER_OPERATIONS
4526 /* If this initializes a field that is smaller than a word, at the
4527 start of a word, try to widen it to a full word.
4528 This special case allows us to output C++ member function
4529 initializations in a form that the optimizers can understand. */
4530 if (GET_CODE (target) == REG
4531 && bitsize < BITS_PER_WORD
4532 && bitpos % BITS_PER_WORD == 0
4533 && GET_MODE_CLASS (mode) == MODE_INT
4534 && TREE_CODE (value) == INTEGER_CST
4536 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4538 tree type = TREE_TYPE (value);
4539 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4541 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4542 value = convert (type, value);
4544 if (BYTES_BIG_ENDIAN)
4546 = fold (build (LSHIFT_EXPR, type, value,
4547 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4548 bitsize = BITS_PER_WORD;
4552 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4553 TREE_VALUE (elt), type, align, cleared,
4554 (DECL_NONADDRESSABLE_P (field)
4555 && GET_CODE (to_rtx) == MEM)
4556 ? MEM_ALIAS_SET (to_rtx)
4557 : get_alias_set (TREE_TYPE (field)));
4560 else if (TREE_CODE (type) == ARRAY_TYPE)
4565 tree domain = TYPE_DOMAIN (type);
4566 tree elttype = TREE_TYPE (type);
4567 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4568 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4569 HOST_WIDE_INT minelt;
4570 HOST_WIDE_INT maxelt;
4572 /* If we have constant bounds for the range of the type, get them. */
4575 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4576 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4579 /* If the constructor has fewer elements than the array,
4580 clear the whole array first. Similarly if this is
4581 a static constructor of a non-BLKmode object. */
4582 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4586 HOST_WIDE_INT count = 0, zero_count = 0;
4587 need_to_clear = ! const_bounds_p;
4589 /* This loop is a more accurate version of the loop in
4590 mostly_zeros_p (it handles RANGE_EXPR in an index).
4591 It is also needed to check for missing elements. */
4592 for (elt = CONSTRUCTOR_ELTS (exp);
4593 elt != NULL_TREE && ! need_to_clear;
4594 elt = TREE_CHAIN (elt))
4596 tree index = TREE_PURPOSE (elt);
4597 HOST_WIDE_INT this_node_count;
4599 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4601 tree lo_index = TREE_OPERAND (index, 0);
4602 tree hi_index = TREE_OPERAND (index, 1);
4604 if (! host_integerp (lo_index, 1)
4605 || ! host_integerp (hi_index, 1))
4611 this_node_count = (tree_low_cst (hi_index, 1)
4612 - tree_low_cst (lo_index, 1) + 1);
4615 this_node_count = 1;
4617 count += this_node_count;
4618 if (mostly_zeros_p (TREE_VALUE (elt)))
4619 zero_count += this_node_count;
4622 /* Clear the entire array first if there are any missing elements,
4623 or if the incidence of zero elements is >= 75%. */
4625 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4629 if (need_to_clear && size > 0)
4632 clear_storage (target, GEN_INT (size), align);
4636 /* Inform later passes that the old value is dead. */
4637 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4639 /* Store each element of the constructor into
4640 the corresponding element of TARGET, determined
4641 by counting the elements. */
4642 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4644 elt = TREE_CHAIN (elt), i++)
4646 register enum machine_mode mode;
4647 HOST_WIDE_INT bitsize;
4648 HOST_WIDE_INT bitpos;
4650 tree value = TREE_VALUE (elt);
4651 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4652 tree index = TREE_PURPOSE (elt);
4653 rtx xtarget = target;
4655 if (cleared && is_zeros_p (value))
4658 unsignedp = TREE_UNSIGNED (elttype);
4659 mode = TYPE_MODE (elttype);
4660 if (mode == BLKmode)
4661 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4662 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4665 bitsize = GET_MODE_BITSIZE (mode);
4667 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4669 tree lo_index = TREE_OPERAND (index, 0);
4670 tree hi_index = TREE_OPERAND (index, 1);
4671 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4672 struct nesting *loop;
4673 HOST_WIDE_INT lo, hi, count;
4676 /* If the range is constant and "small", unroll the loop. */
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (GET_CODE (target) != MEM
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4693 store_constructor_field
4694 (target, bitsize, bitpos, mode, value, type, align,
4696 TYPE_NONALIASED_COMPONENT (type)
4697 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4702 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4703 loop_top = gen_label_rtx ();
4704 loop_end = gen_label_rtx ();
4706 unsignedp = TREE_UNSIGNED (domain);
4708 index = build_decl (VAR_DECL, NULL_TREE, domain);
4710 DECL_RTL (index) = index_r
4711 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4714 if (TREE_CODE (value) == SAVE_EXPR
4715 && SAVE_EXPR_RTL (value) == 0)
4717 /* Make sure value gets expanded once before the loop. */
4719 expand_expr (value, const0_rtx, VOIDmode, 0);
4722 store_expr (lo_index, index_r, 0);
4723 loop = expand_start_loop (0);
4725 /* Assign value to element index. */
4727 = convert (ssizetype,
4728 fold (build (MINUS_EXPR, TREE_TYPE (index),
4729 index, TYPE_MIN_VALUE (domain))));
4730 position = size_binop (MULT_EXPR, position,
4732 TYPE_SIZE_UNIT (elttype)));
4734 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4735 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4736 xtarget = change_address (target, mode, addr);
4737 if (TREE_CODE (value) == CONSTRUCTOR)
4738 store_constructor (value, xtarget, align, cleared,
4739 bitsize / BITS_PER_UNIT);
4741 store_expr (value, xtarget, 0);
4743 expand_exit_loop_if_false (loop,
4744 build (LT_EXPR, integer_type_node,
4747 expand_increment (build (PREINCREMENT_EXPR,
4749 index, integer_one_node), 0, 0);
4751 emit_label (loop_end);
4754 else if ((index != 0 && ! host_integerp (index, 0))
4755 || ! host_integerp (TYPE_SIZE (elttype), 1))
4761 index = ssize_int (1);
4764 index = convert (ssizetype,
4765 fold (build (MINUS_EXPR, index,
4766 TYPE_MIN_VALUE (domain))));
4768 position = size_binop (MULT_EXPR, index,
4770 TYPE_SIZE_UNIT (elttype)));
4771 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4772 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4773 xtarget = change_address (target, mode, addr);
4774 store_expr (value, xtarget, 0);
4779 bitpos = ((tree_low_cst (index, 0) - minelt)
4780 * tree_low_cst (TYPE_SIZE (elttype), 1));
4782 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4784 store_constructor_field (target, bitsize, bitpos, mode, value,
4785 type, align, cleared,
4786 TYPE_NONALIASED_COMPONENT (type)
4787 ? MEM_ALIAS_SET (target) :
4788 get_alias_set (elttype));
4794 /* Set constructor assignments. */
4795 else if (TREE_CODE (type) == SET_TYPE)
4797 tree elt = CONSTRUCTOR_ELTS (exp);
4798 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4799 tree domain = TYPE_DOMAIN (type);
4800 tree domain_min, domain_max, bitlength;
4802 /* The default implementation strategy is to extract the constant
4803 parts of the constructor, use that to initialize the target,
4804 and then "or" in whatever non-constant ranges we need in addition.
4806 If a large set is all zero or all ones, it is
4807 probably better to set it using memset (if available) or bzero.
4808 Also, if a large set has just a single range, it may be
4809 better to first clear the whole set (using bzero/memset)
4810 and then set just the bits we want. */
4812 /* Check for all zeros. */
4813 if (elt == NULL_TREE && size > 0)
4816 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4820 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4821 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4822 bitlength = size_binop (PLUS_EXPR,
4823 size_diffop (domain_max, domain_min),
4826 nbits = tree_low_cst (bitlength, 1);
4828 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4829 are "complicated" (more than one range), initialize (the
4830 constant parts) by copying from a constant. */
4831 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4832 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4834 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4835 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4836 char *bit_buffer = (char *) alloca (nbits);
4837 HOST_WIDE_INT word = 0;
4838 unsigned int bit_pos = 0;
4839 unsigned int ibit = 0;
4840 unsigned int offset = 0; /* In bytes from beginning of set. */
4842 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4845 if (bit_buffer[ibit])
4847 if (BYTES_BIG_ENDIAN)
4848 word |= (1 << (set_word_size - 1 - bit_pos));
4850 word |= 1 << bit_pos;
4854 if (bit_pos >= set_word_size || ibit == nbits)
4856 if (word != 0 || ! cleared)
4858 rtx datum = GEN_INT (word);
4861 /* The assumption here is that it is safe to use
4862 XEXP if the set is multi-word, but not if
4863 it's single-word. */
4864 if (GET_CODE (target) == MEM)
4866 to_rtx = plus_constant (XEXP (target, 0), offset);
4867 to_rtx = change_address (target, mode, to_rtx);
4869 else if (offset == 0)
4873 emit_move_insn (to_rtx, datum);
4880 offset += set_word_size / BITS_PER_UNIT;
4885 /* Don't bother clearing storage if the set is all ones. */
4886 if (TREE_CHAIN (elt) != NULL_TREE
4887 || (TREE_PURPOSE (elt) == NULL_TREE
4889 : ( ! host_integerp (TREE_VALUE (elt), 0)
4890 || ! host_integerp (TREE_PURPOSE (elt), 0)
4891 || (tree_low_cst (TREE_VALUE (elt), 0)
4892 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4893 != (HOST_WIDE_INT) nbits))))
4894 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4896 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4898 /* Start of range of element or NULL. */
4899 tree startbit = TREE_PURPOSE (elt);
4900 /* End of range of element, or element value. */
4901 tree endbit = TREE_VALUE (elt);
4902 #ifdef TARGET_MEM_FUNCTIONS
4903 HOST_WIDE_INT startb, endb;
4905 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4907 bitlength_rtx = expand_expr (bitlength,
4908 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4910 /* Handle a non-range tuple element like [ expr ]. */
4911 if (startbit == NULL_TREE)
4913 startbit = save_expr (endbit);
4917 startbit = convert (sizetype, startbit);
4918 endbit = convert (sizetype, endbit);
4919 if (! integer_zerop (domain_min))
4921 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4922 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4924 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4925 EXPAND_CONST_ADDRESS);
4926 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4927 EXPAND_CONST_ADDRESS);
4933 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4936 emit_move_insn (targetx, target);
4939 else if (GET_CODE (target) == MEM)
4944 #ifdef TARGET_MEM_FUNCTIONS
4945 /* Optimization: If startbit and endbit are
4946 constants divisible by BITS_PER_UNIT,
4947 call memset instead. */
4948 if (TREE_CODE (startbit) == INTEGER_CST
4949 && TREE_CODE (endbit) == INTEGER_CST
4950 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4951 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4953 emit_library_call (memset_libfunc, LCT_NORMAL,
4955 plus_constant (XEXP (targetx, 0),
4956 startb / BITS_PER_UNIT),
4958 constm1_rtx, TYPE_MODE (integer_type_node),
4959 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4960 TYPE_MODE (sizetype));
4964 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4965 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4966 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4967 startbit_rtx, TYPE_MODE (sizetype),
4968 endbit_rtx, TYPE_MODE (sizetype));
4971 emit_move_insn (target, targetx);
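/* Illustrative sketch, not part of the original source: the memset
   shortcut used above for SET_TYPE constructors, in plain C. When a
   bit range [startb, endb) begins and ends on byte boundaries, one
   memset replaces a bit-at-a-time loop (the loop below stands in for
   the general __setbits path). Assumes 8-bit bytes, little-endian bit
   numbering within a byte, and that memset is declared. */

static void
example_set_bit_range (unsigned char *set, unsigned long startb,
                       unsigned long endb)
{
  if (startb % 8 == 0 && endb % 8 == 0)
    /* Both endpoints are byte aligned: set whole bytes at once. */
    memset (set + startb / 8, 0xff, (endb - startb) / 8);
  else
    {
      /* General fallback: set one bit at a time. */
      unsigned long b;

      for (b = startb; b < endb; b++)
        set[b / 8] |= (unsigned char) (1 << (b % 8));
    }
}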
4979 /* Store the value of EXP (an expression tree)
4980 into a subfield of TARGET which has mode MODE and occupies
4981 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4982 If MODE is VOIDmode, it means that we are storing into a bit-field.
4984 If VALUE_MODE is VOIDmode, return nothing in particular.
4985 UNSIGNEDP is not used in this case.
4987 Otherwise, return an rtx for the value stored. This rtx
4988 has mode VALUE_MODE if that is convenient to do.
4989 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4991 ALIGN is the alignment that TARGET is known to have.
4992 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4994 ALIAS_SET is the alias set for the destination. This value will
4995 (in general) be different from that for TARGET, since TARGET is a
4996 reference to the containing structure. */
4999 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5000 unsignedp, align, total_size, alias_set)
5002 HOST_WIDE_INT bitsize;
5003 HOST_WIDE_INT bitpos;
5004 enum machine_mode mode;
5006 enum machine_mode value_mode;
5009 HOST_WIDE_INT total_size;
5012 HOST_WIDE_INT width_mask = 0;
5014 if (TREE_CODE (exp) == ERROR_MARK)
5017 if (bitsize < HOST_BITS_PER_WIDE_INT)
5018 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5020 /* If we are storing into an unaligned field of an aligned union that is
5021 in a register, we may have the mode of TARGET being an integer mode but
5022 MODE == BLKmode. In that case, get an aligned object whose size and
5023 alignment are the same as TARGET and store TARGET into it (we can avoid
5024 the store if the field being stored is the entire width of TARGET). Then
5025 call ourselves recursively to store the field into a BLKmode version of
5026 that object. Finally, load from the object into TARGET. This is not
5027 very efficient in general, but should only be slightly more expensive
5028 than the otherwise-required unaligned accesses. Perhaps this can be
5029 cleaned up later. */
5032 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5036 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5039 rtx blk_object = copy_rtx (object);
5041 PUT_MODE (blk_object, BLKmode);
5043 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
5044 emit_move_insn (object, target);
5046 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5047 align, total_size, alias_set);
5049 /* Even though we aren't returning target, we need to
5050 give it the updated value. */
5051 emit_move_insn (target, object);
5056 if (GET_CODE (target) == CONCAT)
5058 /* We're storing into a struct containing a single __complex. */
5062 return store_expr (exp, target, 0);
5065 /* If the structure is in a register or if the component
5066 is a bit field, we cannot use addressing to access it.
5067 Use bit-field techniques or SUBREG to store in it. */
5069 if (mode == VOIDmode
5070 || (mode != BLKmode && ! direct_store[(int) mode]
5071 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5072 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5073 || GET_CODE (target) == REG
5074 || GET_CODE (target) == SUBREG
5075 /* If the field isn't aligned enough to store as an ordinary memref,
5076 store it as a bit field. */
5077 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5078 && (align < GET_MODE_ALIGNMENT (mode)
5079 || bitpos % GET_MODE_ALIGNMENT (mode)))
5080 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5081 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5082 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5083 /* If the RHS and field are a constant size and the size of the
5084 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5088 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5090 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5092 /* If BITSIZE is narrower than the size of the type of EXP
5093 we will be narrowing TEMP. Normally, what's wanted are the
5094 low-order bits. However, if EXP's type is a record and this is
5095 a big-endian machine, we want the upper BITSIZE bits. */
5096 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5097 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5098 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5099 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5100 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5104 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5106 if (mode != VOIDmode && mode != BLKmode
5107 && mode != TYPE_MODE (TREE_TYPE (exp)))
5108 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5110 /* If the modes of TARGET and TEMP are both BLKmode, both
5111 must be in memory and BITPOS must be aligned on a byte
5112 boundary. If so, we simply do a block copy. */
5113 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5115 unsigned int exp_align = expr_align (exp);
5117 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5118 || bitpos % BITS_PER_UNIT != 0)
5121 target = change_address (target, VOIDmode,
5122 plus_constant (XEXP (target, 0),
5123 bitpos / BITS_PER_UNIT));
5125 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5126 align = MIN (exp_align, align);
5128 /* Find an alignment that is consistent with the bit position. */
5129 while ((bitpos % align) != 0)
5132 emit_block_move (target, temp,
5133 bitsize == -1 ? expr_size (exp)
5134 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5138 return value_mode == VOIDmode ? const0_rtx : target;
5141 /* Store the value in the bitfield. */
5142 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5143 if (value_mode != VOIDmode)
5145 /* The caller wants an rtx for the value. */
5146 /* If possible, avoid refetching from the bitfield itself. */
5148 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5151 enum machine_mode tmode;
5154 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5155 tmode = GET_MODE (temp);
5156 if (tmode == VOIDmode)
5158 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5159 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5160 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5162 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5163 NULL_RTX, value_mode, 0, align,
5170 rtx addr = XEXP (target, 0);
5173 /* If a value is wanted, it must be the lhs;
5174 so make the address stable for multiple use. */
5176 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5177 && ! CONSTANT_ADDRESS_P (addr)
5178 /* A frame-pointer reference is already stable. */
5179 && ! (GET_CODE (addr) == PLUS
5180 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5181 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5182 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5183 addr = copy_to_reg (addr);
5185 /* Now build a reference to just the desired component. */
5187 to_rtx = copy_rtx (change_address (target, mode,
5188 plus_constant (addr,
5190 / BITS_PER_UNIT))));
5191 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5192 MEM_ALIAS_SET (to_rtx) = alias_set;
5194 return store_expr (exp, to_rtx, value_mode != VOIDmode);
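/* Illustrative sketch, not part of the original source: the
   mask-and-shift store that store_bit_field performs when the field
   fits in one word, written in plain C. WIDTH_MASK is built exactly
   as above, ((T) 1 << bitsize) - 1. Assumes little-endian bit
   numbering and BITSIZE strictly smaller than the word width (so the
   shifts are well defined); the helper name is hypothetical. */

static unsigned long
example_store_bit_field (unsigned long word, int bitsize, int bitpos,
                         unsigned long value)
{
  unsigned long width_mask = (1UL << bitsize) - 1;

  /* Clear the old contents of the field... */
  word &= ~(width_mask << bitpos);

  /* ...then OR in VALUE, truncated to BITSIZE bits and shifted into
     position. */
  word |= (value & width_mask) << bitpos;

  return word;
}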
5198 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5199 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5200 ARRAY_REFs and find the ultimate containing object, which we return.
5202 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5203 bit position, and *PUNSIGNEDP to the signedness of the field.
5204 If the position of the field is variable, we store a tree
5205 giving the variable offset (in units) in *POFFSET.
5206 This offset is in addition to the bit position.
5207 If the position is not variable, we store 0 in *POFFSET.
5208 We set *PALIGNMENT to the alignment of the address that will be
5209 computed. This is the alignment of the thing we return if *POFFSET
5210 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5212 If any of the extraction expressions is volatile,
5213 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5215 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5216 is a mode that can be used to access the field. In that case, *PBITSIZE
5219 If the field describes a variable-sized object, *PMODE is set to
5220 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5221 this case, but the address of the object can be found. */
5224 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5225 punsignedp, pvolatilep, palignment)
5227 HOST_WIDE_INT *pbitsize;
5228 HOST_WIDE_INT *pbitpos;
5230 enum machine_mode *pmode;
5233 unsigned int *palignment;
5236 enum machine_mode mode = VOIDmode;
5237 tree offset = size_zero_node;
5238 tree bit_offset = bitsize_zero_node;
5239 unsigned int alignment = BIGGEST_ALIGNMENT;
5242 /* First get the mode, signedness, and size. We do this from just the
5243 outermost expression. */
5244 if (TREE_CODE (exp) == COMPONENT_REF)
5246 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5247 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5248 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5250 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5252 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5254 size_tree = TREE_OPERAND (exp, 1);
5255 *punsignedp = TREE_UNSIGNED (exp);
5259 mode = TYPE_MODE (TREE_TYPE (exp));
5260 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5262 if (mode == BLKmode)
5263 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5265 *pbitsize = GET_MODE_BITSIZE (mode);
5270 if (! host_integerp (size_tree, 1))
5271 mode = BLKmode, *pbitsize = -1;
5273 *pbitsize = tree_low_cst (size_tree, 1);
5276 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5277 and find the ultimate containing object. */
5280 if (TREE_CODE (exp) == BIT_FIELD_REF)
5281 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5282 else if (TREE_CODE (exp) == COMPONENT_REF)
5284 tree field = TREE_OPERAND (exp, 1);
5285 tree this_offset = DECL_FIELD_OFFSET (field);
5287 /* If this field hasn't been filled in yet, don't go
5288 past it. This should only happen when folding expressions
5289 made during type construction. */
5290 if (this_offset == 0)
5292 else if (! TREE_CONSTANT (this_offset)
5293 && contains_placeholder_p (this_offset))
5294 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5296 offset = size_binop (PLUS_EXPR, offset, this_offset);
5297 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5298 DECL_FIELD_BIT_OFFSET (field));
5300 if (! host_integerp (offset, 0))
5301 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5304 else if (TREE_CODE (exp) == ARRAY_REF)
5306 tree index = TREE_OPERAND (exp, 1);
5307 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5308 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5309 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5311 /* We assume all arrays have sizes that are a multiple of a byte.
5312 First subtract the lower bound, if any, in the type of the
5313 index, then convert to sizetype and multiply by the size of the element. */
5315 if (low_bound != 0 && ! integer_zerop (low_bound))
5316 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5319 /* If the index has a self-referential type, pass it to a
5320 WITH_RECORD_EXPR; if the component size does, pass our
5321 component to one. */
5322 if (! TREE_CONSTANT (index)
5323 && contains_placeholder_p (index))
5324 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5325 if (! TREE_CONSTANT (unit_size)
5326 && contains_placeholder_p (unit_size))
5327 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5328 TREE_OPERAND (exp, 0));
5330 offset = size_binop (PLUS_EXPR, offset,
5331 size_binop (MULT_EXPR,
5332 convert (sizetype, index),
5336 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5337 && ! ((TREE_CODE (exp) == NOP_EXPR
5338 || TREE_CODE (exp) == CONVERT_EXPR)
5339 && (TYPE_MODE (TREE_TYPE (exp))
5340 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5343 /* If any reference in the chain is volatile, the effect is volatile. */
5344 if (TREE_THIS_VOLATILE (exp))
5347 /* If the offset is non-constant already, then we can't assume any
5348 alignment more than the alignment here. */
5349 if (! TREE_CONSTANT (offset))
5350 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5352 exp = TREE_OPERAND (exp, 0);
5356 alignment = MIN (alignment, DECL_ALIGN (exp));
5357 else if (TREE_TYPE (exp) != 0)
5358 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5360 /* If OFFSET is constant, see if we can return the whole thing as a
5361 constant bit position. Otherwise, split it up. */
5362 if (host_integerp (offset, 0)
5363 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5365 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5366 && host_integerp (tem, 0))
5367 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5369 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5372 *palignment = alignment;
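/* Illustrative sketch, not part of the original source: the final
   split performed above. When the byte offset is a compile-time
   constant, the whole position collapses into one constant bit
   position and *POFFSET becomes zero; otherwise the constant bit part
   and the variable byte part are kept separate. Names are
   hypothetical and 8-bit units are assumed. */

static void
example_split_position (int offset_is_constant, long offset_units,
                        long bit_offset, long *pbitpos, long *poffset)
{
  if (offset_is_constant)
    {
      /* Fold everything into a single constant bit position. */
      *pbitpos = offset_units * 8 + bit_offset;
      *poffset = 0;
    }
  else
    {
      /* The variable byte offset must be added at run time, so only
         the bit part stays in *PBITPOS. */
      *pbitpos = bit_offset;
      *poffset = offset_units;
    }
}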
5376 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5378 static enum memory_use_mode
5379 get_memory_usage_from_modifier (modifier)
5380 enum expand_modifier modifier;
5386 return MEMORY_USE_RO;
5388 case EXPAND_MEMORY_USE_WO:
5389 return MEMORY_USE_WO;
5391 case EXPAND_MEMORY_USE_RW:
5392 return MEMORY_USE_RW;
5394 case EXPAND_MEMORY_USE_DONT:
5395 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5396 MEMORY_USE_DONT, because they are modifiers to a call of
5397 expand_expr in the ADDR_EXPR case of expand_expr. */
5398 case EXPAND_CONST_ADDRESS:
5399 case EXPAND_INITIALIZER:
5400 return MEMORY_USE_DONT;
5401 case EXPAND_MEMORY_USE_BAD:
5407 /* Given an rtx VALUE that may contain additions and multiplications,
5408 return an equivalent value that just refers to a register or memory.
5409 This is done by generating instructions to perform the arithmetic
5410 and returning a pseudo-register containing the value.
5412 The returned value may be a REG, SUBREG, MEM or constant. */
5415 force_operand (value, target)
5418 register optab binoptab = 0;
5419 /* Use a temporary to force order of execution of calls to `force_operand'. */
5423 /* Use subtarget as the target for operand 0 of a binary operation. */
5424 register rtx subtarget = get_subtarget (target);
5426 /* Check for a PIC address load. */
5428 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5429 && XEXP (value, 0) == pic_offset_table_rtx
5430 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5431 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5432 || GET_CODE (XEXP (value, 1)) == CONST))
5435 subtarget = gen_reg_rtx (GET_MODE (value));
5436 emit_move_insn (subtarget, value);
5440 if (GET_CODE (value) == PLUS)
5441 binoptab = add_optab;
5442 else if (GET_CODE (value) == MINUS)
5443 binoptab = sub_optab;
5444 else if (GET_CODE (value) == MULT)
5446 op2 = XEXP (value, 1);
5447 if (!CONSTANT_P (op2)
5448 && !(GET_CODE (op2) == REG && op2 != subtarget))
5450 tmp = force_operand (XEXP (value, 0), subtarget);
5451 return expand_mult (GET_MODE (value), tmp,
5452 force_operand (op2, NULL_RTX),
5458 op2 = XEXP (value, 1);
5459 if (!CONSTANT_P (op2)
5460 && !(GET_CODE (op2) == REG && op2 != subtarget))
5462 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5464 binoptab = add_optab;
5465 op2 = negate_rtx (GET_MODE (value), op2);
5468 /* Check for an addition with OP2 a constant integer and our first
5469 operand a PLUS of a virtual register and something else. In that
5470 case, we want to emit the sum of the virtual register and the
5471 constant first and then add the other value. This allows virtual
5472 register instantiation to simply modify the constant rather than
5473 creating another one around this addition. */
5474 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5475 && GET_CODE (XEXP (value, 0)) == PLUS
5476 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5477 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5478 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5480 rtx temp = expand_binop (GET_MODE (value), binoptab,
5481 XEXP (XEXP (value, 0), 0), op2,
5482 subtarget, 0, OPTAB_LIB_WIDEN);
5483 return expand_binop (GET_MODE (value), binoptab, temp,
5484 force_operand (XEXP (XEXP (value, 0), 1), 0),
5485 target, 0, OPTAB_LIB_WIDEN);
5488 tmp = force_operand (XEXP (value, 0), subtarget);
5489 return expand_binop (GET_MODE (value), binoptab, tmp,
5490 force_operand (op2, NULL_RTX),
5491 target, 0, OPTAB_LIB_WIDEN);
5492 /* We give UNSIGNEDP = 0 to expand_binop
5493 because the only operations we are expanding here are signed ones. */
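/* Illustrative sketch, not part of the original source: the
   reassociation force_operand applies above for virtual registers.
   Given (vreg + other) + c, emitting (vreg + c) first lets virtual
   register instantiation, which rewrites vreg as (hard_reg + k),
   simply fold k into c rather than materialize another constant
   addition. In plain arithmetic the transformation is just: */

static long
example_reassociate (long vreg, long other, long c)
{
  /* (vreg + other) + c == (vreg + c) + other. */
  return (vreg + c) + other;
}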
5498 /* Subroutine of expand_expr:
5499 save the non-copied parts (LIST) of an expr (LHS), and return a list
5500 which can restore these values to their previous values,
5501 should something modify their storage. */
5504 save_noncopied_parts (lhs, list)
5511 for (tail = list; tail; tail = TREE_CHAIN (tail))
5512 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5513 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5516 tree part = TREE_VALUE (tail);
5517 tree part_type = TREE_TYPE (part);
5518 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5520 = assign_temp (build_qualified_type (part_type,
5521 (TYPE_QUALS (part_type)
5522 | TYPE_QUAL_CONST)),
5525 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5526 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5527 parts = tree_cons (to_be_saved,
5528 build (RTL_EXPR, part_type, NULL_TREE,
5531 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5536 /* Subroutine of expand_expr:
5537 record the non-copied parts (LIST) of an expr (LHS), and return a list
5538 which specifies the initial values of these parts. */
5541 init_noncopied_parts (lhs, list)
5548 for (tail = list; tail; tail = TREE_CHAIN (tail))
5549 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5550 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5551 else if (TREE_PURPOSE (tail))
5553 tree part = TREE_VALUE (tail);
5554 tree part_type = TREE_TYPE (part);
5555 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5556 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
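/* Illustrative sketch, not part of the original source: the
   save-then-restore idea behind save_noncopied_parts, in plain C.
   Before a store that may clobber a part that must not be copied, the
   old contents are snapshotted into a temporary so they can be
   written back afterwards. The struct and names are hypothetical. */

struct example_part
{
  int value;
};

static void
example_save_and_restore (struct example_part *part,
                          void (*store) (void))
{
  /* Snapshot, as save_noncopied_parts does with assign_temp. */
  struct example_part saved = *part;

  /* The store may or may not modify *PART... */
  (*store) ();

  /* ...so restore the saved value afterwards. */
  *part = saved;
}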
5561 /* Subroutine of expand_expr: return nonzero iff there is no way that
5562 EXP can reference X, which is being modified. TOP_P is nonzero if this
5563 call is going to be used to determine whether we need a temporary
5564 for EXP, as opposed to a recursive call to this function.
5566 It is always safe for this routine to return zero since it merely
5567 searches for optimization opportunities. */
5570 safe_from_p (x, exp, top_p)
5577 static tree save_expr_list;
5580 /* If EXP has varying size, we MUST use a target since we currently
5581 have no way of allocating temporaries of variable size
5582 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5583 So we assume here that something at a higher level has prevented a
5584 clash. This is somewhat bogus, but the best we can do. Only
5585 do this when X is BLKmode and when we are at the top level. */
5586 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5587 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5588 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5589 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5590 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5592 && GET_MODE (x) == BLKmode)
5593 /* If X is in the outgoing argument area, it is always safe. */
5594 || (GET_CODE (x) == MEM
5595 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5596 || (GET_CODE (XEXP (x, 0)) == PLUS
5597 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5600 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5601 find the underlying pseudo. */
5602 if (GET_CODE (x) == SUBREG)
5605 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5609 /* A SAVE_EXPR might appear many times in the expression passed to the
5610 top-level safe_from_p call, and if it has a complex subexpression,
5611 examining it multiple times could result in a combinatorial explosion.
5612 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5613 with optimization took about 28 minutes to compile -- even though it was
5614 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5615 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5616 we have processed. Note that the only test of top_p was above. */
5625 rtn = safe_from_p (x, exp, 0);
5627 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5628 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5633 /* Now look at our tree code and possibly recurse. */
5634 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5637 exp_rtl = DECL_RTL (exp);
5644 if (TREE_CODE (exp) == TREE_LIST)
5645 return ((TREE_VALUE (exp) == 0
5646 || safe_from_p (x, TREE_VALUE (exp), 0))
5647 && (TREE_CHAIN (exp) == 0
5648 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5649 else if (TREE_CODE (exp) == ERROR_MARK)
5650 return 1; /* An already-visited SAVE_EXPR? */
5655 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5659 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5660 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5664 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5665 the expression. If it is set, we conflict iff we are that rtx or
5666 both are in memory. Otherwise, we check all operands of the
5667 expression recursively. */
5669 switch (TREE_CODE (exp))
5672 return (staticp (TREE_OPERAND (exp, 0))
5673 || TREE_STATIC (exp)
5674 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5677 if (GET_CODE (x) == MEM
5678 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5679 get_alias_set (exp)))
5684 /* Assume that the call will clobber all hard registers and all of memory. */
5686 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5687 || GET_CODE (x) == MEM)
5692 /* If a sequence exists, we would have to scan every instruction
5693 in the sequence to see if it was safe. This is probably not worthwhile. */
5695 if (RTL_EXPR_SEQUENCE (exp))
5698 exp_rtl = RTL_EXPR_RTL (exp);
5701 case WITH_CLEANUP_EXPR:
5702 exp_rtl = RTL_EXPR_RTL (exp);
5705 case CLEANUP_POINT_EXPR:
5706 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5709 exp_rtl = SAVE_EXPR_RTL (exp);
5713 /* If we've already scanned this, don't do it again. Otherwise,
5714 show we've scanned it, and record it so the flag can be cleared once the top-level scan is done. */
5716 if (TREE_PRIVATE (exp))
5719 TREE_PRIVATE (exp) = 1;
5720 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5722 TREE_PRIVATE (exp) = 0;
5726 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5730 /* The only operand we look at is operand 1. The rest aren't
5731 part of the expression. */
5732 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5734 case METHOD_CALL_EXPR:
5735 /* This takes an rtx argument, but shouldn't appear here. */
5742 /* If we have an rtx, we do not need to scan our operands. */
5746 nops = first_rtl_op (TREE_CODE (exp));
5747 for (i = 0; i < nops; i++)
5748 if (TREE_OPERAND (exp, i) != 0
5749 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5752 /* If this is a language-specific tree code, it may require
5753 special handling. */
5754 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5756 && !(*lang_safe_from_p) (x, exp))
5760 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5764 if (GET_CODE (exp_rtl) == SUBREG)
5766 exp_rtl = SUBREG_REG (exp_rtl);
5767 if (GET_CODE (exp_rtl) == REG
5768 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5772 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5773 are memory and they conflict. */
5774 return ! (rtx_equal_p (x, exp_rtl)
5775 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5776 && true_dependence (exp_rtl, GET_MODE (x), x,
5777 rtx_addr_varies_p)));
5780 /* If we reach here, it is safe. */
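/* Illustrative sketch, not part of the original source: the
   visited-flag technique described above for SAVE_EXPRs, on a toy
   DAG. Without the flag a node shared many times is rescanned every
   time it is reached, and chains of shared nodes blow up
   exponentially; marking each node on first visit makes the scan
   linear. As with TREE_PRIVATE and save_expr_list above, the caller
   must clear the marks afterwards. Names are hypothetical. */

struct example_node
{
  int visited;
  struct example_node *kid[2];
};

static int
example_scan (struct example_node *n)
{
  if (n == 0 || n->visited)
    return 1;                   /* Already checked; nothing new to do. */

  n->visited = 1;               /* Mark before recursing. */

  return example_scan (n->kid[0]) && example_scan (n->kid[1]);
}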
5784 /* Subroutine of expand_expr: return nonzero iff EXP is an
5785 expression whose type is statically determinable. */
5791 if (TREE_CODE (exp) == PARM_DECL
5792 || TREE_CODE (exp) == VAR_DECL
5793 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5794 || TREE_CODE (exp) == COMPONENT_REF
5795 || TREE_CODE (exp) == ARRAY_REF)
5800 /* Subroutine of expand_expr: return rtx if EXP is a
5801 variable or parameter; else return 0. */
5808 switch (TREE_CODE (exp))
5812 return DECL_RTL (exp);
5818 #ifdef MAX_INTEGER_COMPUTATION_MODE
5820 check_max_integer_computation_mode (exp)
5823 enum tree_code code;
5824 enum machine_mode mode;
5826 /* Strip any NOPs that don't change the mode. */
5828 code = TREE_CODE (exp);
5830 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5831 if (code == NOP_EXPR
5832 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5835 /* First check the type of the overall operation. We need only look at
5836 unary, binary and relational operations. */
5837 if (TREE_CODE_CLASS (code) == '1'
5838 || TREE_CODE_CLASS (code) == '2'
5839 || TREE_CODE_CLASS (code) == '<')
5841 mode = TYPE_MODE (TREE_TYPE (exp));
5842 if (GET_MODE_CLASS (mode) == MODE_INT
5843 && mode > MAX_INTEGER_COMPUTATION_MODE)
5844 fatal ("unsupported wide integer operation");
5847 /* Check operand of a unary op. */
5848 if (TREE_CODE_CLASS (code) == '1')
5850 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5851 if (GET_MODE_CLASS (mode) == MODE_INT
5852 && mode > MAX_INTEGER_COMPUTATION_MODE)
5853 fatal ("unsupported wide integer operation");
5856 /* Check operands of a binary/comparison op. */
5857 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5859 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5860 if (GET_MODE_CLASS (mode) == MODE_INT
5861 && mode > MAX_INTEGER_COMPUTATION_MODE)
5862 fatal ("unsupported wide integer operation");
5864 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5865 if (GET_MODE_CLASS (mode) == MODE_INT
5866 && mode > MAX_INTEGER_COMPUTATION_MODE)
5867 fatal ("unsupported wide integer operation");
5872 /* expand_expr: generate code for computing expression EXP.
5873 An rtx for the computed value is returned. The value is never null.
5874 In the case of a void EXP, const0_rtx is returned.
5876 The value may be stored in TARGET if TARGET is nonzero.
5877 TARGET is just a suggestion; callers must assume that
5878 the rtx returned may not be the same as TARGET.
5880 If TARGET is CONST0_RTX, it means that the value will be ignored.
5882 If TMODE is not VOIDmode, it suggests generating the
5883 result in mode TMODE. But this is done only when convenient.
5884 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5885 TMODE is just a suggestion; callers must assume that
5886 the rtx returned may not have mode TMODE.
5888 Note that TARGET may have neither TMODE nor MODE. In that case, it
5889 probably will not be used.
5891 If MODIFIER is EXPAND_SUM then when EXP is an addition
5892 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5893 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5894 products as above, or REG or MEM, or constant.
5895 Ordinarily in such cases we would output mul or add instructions
5896 and then return a pseudo reg containing the sum.
5898 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5899 it also marks a label as absolutely required (it can't be dead).
5900 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5901 This is used for outputting expressions used in initializers.
5903 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5904 with a constant address even if that address is not normally legitimate.
5905 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5908 expand_expr (exp, target, tmode, modifier)
5911 enum machine_mode tmode;
5912 enum expand_modifier modifier;
5914 register rtx op0, op1, temp;
5915 tree type = TREE_TYPE (exp);
5916 int unsignedp = TREE_UNSIGNED (type);
5917 register enum machine_mode mode;
5918 register enum tree_code code = TREE_CODE (exp);
5920 rtx subtarget, original_target;
5923 /* Used by check-memory-usage to make modifier read only. */
5924 enum expand_modifier ro_modifier;
5926 /* Handle ERROR_MARK before anybody tries to access its type. */
5927 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5929 op0 = CONST0_RTX (tmode);
5935 mode = TYPE_MODE (type);
5936 /* Use subtarget as the target for operand 0 of a binary operation. */
5937 subtarget = get_subtarget (target);
5938 original_target = target;
5939 ignore = (target == const0_rtx
5940 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5941 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5942 || code == COND_EXPR)
5943 && TREE_CODE (type) == VOID_TYPE));
5945 /* Make a read-only version of the modifier. */
5946 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5947 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5948 ro_modifier = modifier;
5950 ro_modifier = EXPAND_NORMAL;
5952 /* If we are going to ignore this result, we need only do something
5953 if there is a side-effect somewhere in the expression. If there
5954 is, short-circuit the most common cases here. Note that we must
5955 not call expand_expr with anything but const0_rtx in case this
5956 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5960 if (! TREE_SIDE_EFFECTS (exp))
5963 /* Ensure we reference a volatile object even if value is ignored, but
5964 don't do this if all we are doing is taking its address. */
5965 if (TREE_THIS_VOLATILE (exp)
5966 && TREE_CODE (exp) != FUNCTION_DECL
5967 && mode != VOIDmode && mode != BLKmode
5968 && modifier != EXPAND_CONST_ADDRESS)
5970 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5971 if (GET_CODE (temp) == MEM)
5972 temp = copy_to_reg (temp);
5976 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5977 || code == INDIRECT_REF || code == BUFFER_REF)
5978 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5979 VOIDmode, ro_modifier);
5980 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5981 || code == ARRAY_REF)
5983 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5984 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5987 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5988 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5989 /* If the second operand has no side effects, just evaluate the first. */
5991 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5992 VOIDmode, ro_modifier);
5993 else if (code == BIT_FIELD_REF)
5995 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5996 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5997 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6004 #ifdef MAX_INTEGER_COMPUTATION_MODE
6005 /* Only check stuff here if the mode we want is different from the mode
6006 of the expression; if it's the same, check_max_integer_computation_mode
6007 will handle it. Do we really need to check this stuff at all? */
6010 && GET_MODE (target) != mode
6011 && TREE_CODE (exp) != INTEGER_CST
6012 && TREE_CODE (exp) != PARM_DECL
6013 && TREE_CODE (exp) != ARRAY_REF
6014 && TREE_CODE (exp) != COMPONENT_REF
6015 && TREE_CODE (exp) != BIT_FIELD_REF
6016 && TREE_CODE (exp) != INDIRECT_REF
6017 && TREE_CODE (exp) != CALL_EXPR
6018 && TREE_CODE (exp) != VAR_DECL
6019 && TREE_CODE (exp) != RTL_EXPR)
6021 enum machine_mode mode = GET_MODE (target);
6023 if (GET_MODE_CLASS (mode) == MODE_INT
6024 && mode > MAX_INTEGER_COMPUTATION_MODE)
6025 fatal ("unsupported wide integer operation");
6029 && TREE_CODE (exp) != INTEGER_CST
6030 && TREE_CODE (exp) != PARM_DECL
6031 && TREE_CODE (exp) != ARRAY_REF
6032 && TREE_CODE (exp) != COMPONENT_REF
6033 && TREE_CODE (exp) != BIT_FIELD_REF
6034 && TREE_CODE (exp) != INDIRECT_REF
6035 && TREE_CODE (exp) != VAR_DECL
6036 && TREE_CODE (exp) != CALL_EXPR
6037 && TREE_CODE (exp) != RTL_EXPR
6038 && GET_MODE_CLASS (tmode) == MODE_INT
6039 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6040 fatal ("unsupported wide integer operation");
6042 check_max_integer_computation_mode (exp);
6045 /* If we will do cse, generate all results into pseudo registers
6046 since 1) that allows cse to find more things
6047 and 2) otherwise cse could produce an insn the machine cannot support. */
6050 if (! cse_not_expected && mode != BLKmode && target
6051 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6058 tree function = decl_function_context (exp);
6059 /* Handle using a label in a containing function. */
6060 if (function != current_function_decl
6061 && function != inline_function_decl && function != 0)
6063 struct function *p = find_function_data (function);
6064 p->expr->x_forced_labels
6065 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6066 p->expr->x_forced_labels);
6070 if (modifier == EXPAND_INITIALIZER)
6071 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6076 temp = gen_rtx_MEM (FUNCTION_MODE,
6077 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6078 if (function != current_function_decl
6079 && function != inline_function_decl && function != 0)
6080 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6085 if (DECL_RTL (exp) == 0)
6087 error_with_decl (exp, "prior parameter's size depends on `%s'");
6088 return CONST0_RTX (mode);
6091 /* ... fall through ... */
6094 /* If a static var's type was incomplete when the decl was written,
6095 but the type is complete now, lay out the decl now. */
6096 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6097 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6099 layout_decl (exp, 0);
6100 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6103 /* Although static-storage variables start off initialized, according to
6104 ANSI C, a memcpy could overwrite them with uninitialized values. So
6105 we check them too. This also lets us check for read-only variables
6106 accessed via a non-const declaration, in case it won't be detected
6107 any other way (e.g., in an embedded system or OS kernel without memory protection).
6110 Aggregates are not checked here; they're handled elsewhere. */
6111 if (cfun && current_function_check_memory_usage
6113 && GET_CODE (DECL_RTL (exp)) == MEM
6114 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6116 enum memory_use_mode memory_usage;
6117 memory_usage = get_memory_usage_from_modifier (modifier);
6119 in_check_memory_usage = 1;
6120 if (memory_usage != MEMORY_USE_DONT)
6121 emit_library_call (chkr_check_addr_libfunc,
6122 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6123 XEXP (DECL_RTL (exp), 0), Pmode,
6124 GEN_INT (int_size_in_bytes (type)),
6125 TYPE_MODE (sizetype),
6126 GEN_INT (memory_usage),
6127 TYPE_MODE (integer_type_node));
6128 in_check_memory_usage = 0;
6131 /* ... fall through ... */
6135 if (DECL_RTL (exp) == 0)
6138 /* Ensure the variable is marked as used even if it doesn't go through
6139 a parser. If it hasn't been used yet, write out an external definition. */
6141 if (! TREE_USED (exp))
6143 assemble_external (exp);
6144 TREE_USED (exp) = 1;
6147 /* Show we haven't gotten RTL for this yet. */
6150 /* Handle variables inherited from containing functions. */
6151 context = decl_function_context (exp);
6153 /* We treat inline_function_decl as an alias for the current function
6154 because that is the inline function whose vars, types, etc.
6155 are being merged into the current function.
6156 See expand_inline_function. */
6158 if (context != 0 && context != current_function_decl
6159 && context != inline_function_decl
6160 /* If var is static, we don't need a static chain to access it. */
6161 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6162 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6166 /* Mark as non-local and addressable. */
6167 DECL_NONLOCAL (exp) = 1;
6168 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6170 mark_addressable (exp);
6171 if (GET_CODE (DECL_RTL (exp)) != MEM)
6173 addr = XEXP (DECL_RTL (exp), 0);
6174 if (GET_CODE (addr) == MEM)
6175 addr = change_address (addr, Pmode,
6176 fix_lexical_addr (XEXP (addr, 0), exp));
6178 addr = fix_lexical_addr (addr, exp);
6180 temp = change_address (DECL_RTL (exp), mode, addr);
6183 /* This is the case of an array whose size is to be determined
6184 from its initializer, while the initializer is still being parsed.
6187 else if (GET_CODE (DECL_RTL (exp)) == MEM
6188 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6189 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6190 XEXP (DECL_RTL (exp), 0));
6192 /* If DECL_RTL is memory, we are in the normal case and either
6193 the address is not valid or it is not a register and -fforce-addr
6194 is specified, get the address into a register. */
6196 else if (GET_CODE (DECL_RTL (exp)) == MEM
6197 && modifier != EXPAND_CONST_ADDRESS
6198 && modifier != EXPAND_SUM
6199 && modifier != EXPAND_INITIALIZER
6200 && (! memory_address_p (DECL_MODE (exp),
6201 XEXP (DECL_RTL (exp), 0))
6203 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6204 temp = change_address (DECL_RTL (exp), VOIDmode,
6205 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6207 /* If we got something, return it. But first, set the alignment
6208 if the address is a register. */
6211 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6212 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
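      /* (Illustration added for clarity, not from the original comment: on
	 a target whose PROMOTE_MODE widens HImode variables into SImode
	 registers, DECL_RTL is an SImode REG; the SUBREG:HI built below is
	 flagged with SUBREG_PROMOTED_VAR_P so later code can skip redundant
	 extensions.  The particular modes are only an example.)  */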
      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}

      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */
	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}
      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }
    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
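	/* (Background note, added here: a PLACEHOLDER_EXPR stands for the
	   object of a self-referential type -- e.g. a record whose field
	   sizes depend on the record itself -- and an enclosing
	   WITH_RECORD_EXPR supplies the concrete object to substitute.)  */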
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
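    /* (Note added for clarity: an RTL_EXPR's insn sequence may be emitted
       at most once; the const0_rtx marker stored below is how a second,
       erroneous expansion is caught.)  */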
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     TREE_ADDRESSABLE (exp), 1, 1);

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
			     int_size_in_bytes (TREE_TYPE (exp)));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return
	    GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (cfun && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
				   Pmode, GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
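	/* (For example -- illustration only -- given "const int *p" in C,
	   *p may still change through another, non-const alias, so
	   TREE_READONLY on the reference proves nothing about the
	   memory itself.)  */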
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return
	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target,
				  tmode, ro_modifier);
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return (GEN_INT
			      (TREE_STRING_POINTER
			       (init)[TREE_INT_CST_LOW (index)]));
		  }
	      }
	  }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
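		    /* (Note added for clarity: for an unsigned field the
		       mask below keeps just the BITSIZE low-order bits; for
		       a signed field the left/right shift pair sign-extends
		       those bits instead.)  */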
		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be safe to use.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   (modifier == EXPAND_INITIALIZER
			    || modifier == EXPAND_CONST_ADDRESS)
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in memory, put it into a register.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		tree nt = build_qualified_type (TREE_TYPE (tem),
						(TYPE_QUALS (TREE_TYPE (tem))
						 | TYPE_QUAL_CONST));
		rtx memloc = assign_temp (nt, 1, 1, 1);

		mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& alignment == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (cfun != 0 && current_function_check_memory_usage
	    && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		in_check_memory_usage = 1;
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc,
				     LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
				     Pmode, GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (mode1 != BLKmode
			&& SLOW_UNALIGNED_ACCESS (mode1, alignment)
			&& ((TYPE_ALIGN (TREE_TYPE (tem))
			     < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
		    /* If the type and the field are a constant size and the
		       size of the type isn't the same size as the bitfield,
		       we must use bitfield operations.  */
		    || (bitsize >= 0
			&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
			    == INTEGER_CST)
			&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
						  bitsize))))
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& mode == BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode, alignment)
		&& (TYPE_ALIGN (type) > alignment
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
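	    /* (Note added for clarity: with its third argument nonzero,
	       mode_for_size may find no suitable integer mode and return
	       BLKmode; the BLKmode branch below then falls back to a block
	       move into a temporary.)  */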
	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 bitsize == -1 ? expr_size (exp)
				 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					    / BITS_PER_UNIT),
				 BITS_PER_UNIT);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
						TYPE_QUAL_CONST);
		rtx new = assign_temp (nt, 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  {
	    rtx new = gen_rtx_MEM (mode1,
				   plus_constant (XEXP (op0, 0),
						  (bitpos / BITS_PER_UNIT)));

	    MEM_COPY_ATTRIBUTES (new, op0);
	    op0 = new;
	  }
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
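	/* (Note added for clarity: in the code below the `word' is in fact
	   a byte -- BITS_PER_UNIT plays the role of bits_per_word in the
	   pseudocode above.)  */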
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion
	     isn't actually doing anything unless we need to make the
	     alignment stricter.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
	      && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
		  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
	    return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
				modifier);

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, BITS_PER_UNIT,
			 int_size_in_bytes (type), 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
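      /* (Illustration added here, not from the original source: given
	 (x + 4) + fp, the constant 4 is already innermost, so we swap x
	 and fp to get (fp + 4) + x; once fp is eliminated to sp+offset,
	 the two constants combine.)  */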
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 index.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
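	      /* (E.g., with 32-bit SImode on a 64-bit host, the low word
		 0xffffffff must become the canonical (const_int -1);
		 example added here, not in the original comment.)  */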
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP0, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      goto binop;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return
	      gen_rtx_PLUS
		(mode,
		 gen_rtx_MULT
		   (mode, XEXP (op0, 0),
		    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
			  * INTVAL (XEXP (op0, 1))));
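	  /* (Note added for clarity: distributing the multiply keeps the
	     constant term outermost -- (x + c)*d becomes x*d + c*d -- so
	     it can later fold into an address displacement.)  */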
	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
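      /* (E.g., (long) si1 * (long) si2 can use a widening SImode->DImode
	 multiply insn when the target provides one; the mode names here
	 are only an illustration added for clarity.)  */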
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, ro_modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
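	/* (Illustration added for clarity: x ? a + 4 : a can become
	   a + ((x != 0) << 2), avoiding a branch.)  */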
8049 if (temp && singleton && binary_op
8050 && (TREE_CODE (binary_op) == PLUS_EXPR
8051 || TREE_CODE (binary_op) == MINUS_EXPR
8052 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8053 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8054 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8055 : integer_onep (TREE_OPERAND (binary_op, 1)))
8056 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8059 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8060 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8061 ? addv_optab : add_optab)
8062 : TREE_CODE (binary_op) == MINUS_EXPR
8063 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8064 ? subv_optab : sub_optab)
8065 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8068 /* If we had X ? A : A + 1, do this as A + (X == 0).
8070 We have to invert the truth value here and then put it
8071 back later if do_store_flag fails. We cannot simply copy
8072 TREE_OPERAND (exp, 0) to another variable and modify that
8073 because invert_truthvalue can modify the tree pointed to by its argument. */
8075 if (singleton == TREE_OPERAND (exp, 1))
8076 TREE_OPERAND (exp, 0)
8077 = invert_truthvalue (TREE_OPERAND (exp, 0));
8079 result = do_store_flag (TREE_OPERAND (exp, 0),
8080 (safe_from_p (temp, singleton, 1)
8082 mode, BRANCH_COST <= 1);
8084 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8085 result = expand_shift (LSHIFT_EXPR, mode, result,
8086 build_int_2 (tree_log2
8090 (safe_from_p (temp, singleton, 1)
8091 ? temp : NULL_RTX), 0);
8095 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8096 return expand_binop (mode, boptab, op1, result, temp,
8097 unsignedp, OPTAB_LIB_WIDEN);
8099 else if (singleton == TREE_OPERAND (exp, 1))
8100 TREE_OPERAND (exp, 0)
8101 = invert_truthvalue (TREE_OPERAND (exp, 0));
8104 do_pending_stack_adjust ();
8106 op0 = gen_label_rtx ();
8108 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8112 /* If the target conflicts with the other operand of the
8113 binary op, we can't use it. Also, we can't use the target
8114 if it is a hard register, because evaluating the condition
8115 might clobber it. */
8117 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8118 || (GET_CODE (temp) == REG
8119 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8120 temp = gen_reg_rtx (mode);
8121 store_expr (singleton, temp, 0);
8124 expand_expr (singleton,
8125 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8126 if (singleton == TREE_OPERAND (exp, 1))
8127 jumpif (TREE_OPERAND (exp, 0), op0);
8129 jumpifnot (TREE_OPERAND (exp, 0), op0);
8131 start_cleanup_deferral ();
8132 if (binary_op && temp == 0)
8133 /* Just touch the other operand. */
8134 expand_expr (TREE_OPERAND (binary_op, 1),
8135 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8137 store_expr (build (TREE_CODE (binary_op), type,
8138 make_tree (type, temp),
8139 TREE_OPERAND (binary_op, 1)),
8142 store_expr (build1 (TREE_CODE (unary_op), type,
8143 make_tree (type, temp)),
8147 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8148 comparison operator. If we have one of these cases, set the
8149 output to A, branch on A (cse will merge these two references),
8150 then set the output to FOO. */
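/* A sketch of the effect on hypothetical C input:

     t = a > 0 ? a : foo;

   is emitted roughly as

     t = a;
     if (!(t > 0))
       t = foo;

   so the two references to A become a single store that cse can
   merge with the branch condition.  */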
8152 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8153 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8154 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8155 TREE_OPERAND (exp, 1), 0)
8156 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8157 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8158 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8160 if (GET_CODE (temp) == REG
8161 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8162 temp = gen_reg_rtx (mode);
8163 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8164 jumpif (TREE_OPERAND (exp, 0), op0);
8166 start_cleanup_deferral ();
8167 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8171 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8172 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8173 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8174 TREE_OPERAND (exp, 2), 0)
8175 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8176 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8177 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8179 if (GET_CODE (temp) == REG
8180 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8181 temp = gen_reg_rtx (mode);
8182 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8183 jumpifnot (TREE_OPERAND (exp, 0), op0);
8185 start_cleanup_deferral ();
8186 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8191 op1 = gen_label_rtx ();
8192 jumpifnot (TREE_OPERAND (exp, 0), op0);
8194 start_cleanup_deferral ();
8196 /* One branch of the cond can be void, if it never returns. For
8197 example, A ? throw : E. */
8199 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8200 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8202 expand_expr (TREE_OPERAND (exp, 1),
8203 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8204 end_cleanup_deferral ();
8206 emit_jump_insn (gen_jump (op1));
8209 start_cleanup_deferral ();
8211 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8212 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8214 expand_expr (TREE_OPERAND (exp, 2),
8215 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8218 end_cleanup_deferral ();
8229 /* Something needs to be initialized, but we didn't know
8230 where that thing was when building the tree. For example,
8231 it could be the return value of a function, or a parameter
8232 to a function which is passed on the stack, or a temporary
8233 variable which must be passed by reference.
8235 We guarantee that the expression will either be constructed
8236 or copied into our original target. */
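/* A hypothetical C++-style example of this case:

     void f (const S &);
     f (S ());

   The temporary S () must be constructed directly in a stack slot
   whose address is passed to f; that slot is this TARGET_EXPR's
   target.  */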
8238 tree slot = TREE_OPERAND (exp, 0);
8239 tree cleanups = NULL_TREE;
8242 if (TREE_CODE (slot) != VAR_DECL)
8246 target = original_target;
8248 /* Set this here so that if we get a target that refers to a
8249 register variable that's already been used, put_reg_into_stack
8250 knows that it should fix up those uses. */
8251 TREE_USED (slot) = 1;
8255 if (DECL_RTL (slot) != 0)
8257 target = DECL_RTL (slot);
8258 /* If we have already expanded the slot, don't do it again. */
8260 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8265 target = assign_temp (type, 2, 0, 1);
8266 /* All temp slots at this level must not conflict. */
8267 preserve_temp_slots (target);
8268 DECL_RTL (slot) = target;
8269 if (TREE_ADDRESSABLE (slot))
8270 put_var_into_stack (slot);
8272 /* Since SLOT is not known to the called function
8273 to belong to its stack frame, we must build an explicit
8274 cleanup. This case occurs when we must build up a reference
8275 to pass the reference as an argument. In this case,
8276 it is very likely that such a reference need not be built here. */
8279 if (TREE_OPERAND (exp, 2) == 0)
8280 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8281 cleanups = TREE_OPERAND (exp, 2);
8286 /* This case does occur when expanding a parameter which
8287 needs to be constructed on the stack. The target
8288 is the actual stack address that we want to initialize.
8289 The function we call will perform the cleanup in this case. */
8291 /* If we have already assigned it space, use that space,
8292 not the target that we were passed in, as our target
8293 parameter is only a hint. */
8294 if (DECL_RTL (slot) != 0)
8296 target = DECL_RTL (slot);
8297 /* If we have already expanded the slot, don't do it again. */
8299 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8304 DECL_RTL (slot) = target;
8305 /* If we must have an addressable slot, then make sure that
8306 the RTL that we just stored in slot is OK. */
8307 if (TREE_ADDRESSABLE (slot))
8308 put_var_into_stack (slot);
8312 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8313 /* Mark it as expanded. */
8314 TREE_OPERAND (exp, 1) = NULL_TREE;
8316 store_expr (exp1, target, 0);
8318 expand_decl_cleanup (NULL_TREE, cleanups);
8325 tree lhs = TREE_OPERAND (exp, 0);
8326 tree rhs = TREE_OPERAND (exp, 1);
8327 tree noncopied_parts = 0;
8328 tree lhs_type = TREE_TYPE (lhs);
8330 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8331 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8332 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8333 TYPE_NONCOPIED_PARTS (lhs_type));
8334 while (noncopied_parts != 0)
8336 expand_assignment (TREE_VALUE (noncopied_parts),
8337 TREE_PURPOSE (noncopied_parts), 0, 0);
8338 noncopied_parts = TREE_CHAIN (noncopied_parts);
8345 /* If lhs is complex, expand calls in rhs before computing it.
8346 That's so we don't compute a pointer and save it over a call.
8347 If lhs is simple, compute it first so we can give it as a
8348 target if the rhs is just a call. This avoids an extra temp and copy
8349 and that prevents a partial subsumption which makes bad code.
8350 Actually we could treat component_ref's of vars like vars. */
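/* Illustrative only: for

     x = g ();

   the simple lhs X is computed first so it can be given to the call
   as its target, letting g store its result directly into X; for

     *p (q) = g ();

   the rhs call is expanded first, so the pointer produced by p (q)
   is not computed and then saved across the call to g.  */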
8352 tree lhs = TREE_OPERAND (exp, 0);
8353 tree rhs = TREE_OPERAND (exp, 1);
8354 tree noncopied_parts = 0;
8355 tree lhs_type = TREE_TYPE (lhs);
8359 if (TREE_CODE (lhs) != VAR_DECL
8360 && TREE_CODE (lhs) != RESULT_DECL
8361 && TREE_CODE (lhs) != PARM_DECL
8362 && ! (TREE_CODE (lhs) == INDIRECT_REF
8363 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8365 /* Check for |= or &= of a bitfield of size one into another bitfield
8366 of size 1. In this case, (unless we need the result of the
8367 assignment) we can do this more efficiently with a
8368 test followed by an assignment, if necessary.
8370 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8371 things change so we do, this code should be enhanced to support it. */
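/* E.g., illustratively, with

     struct { unsigned a : 1, b : 1; } s;
     s.a |= s.b;

   and the result unused, this emits a test of s.b and a conditional
   store of 1 into s.a instead of a load-modify-store of the word.  */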
8374 && TREE_CODE (lhs) == COMPONENT_REF
8375 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8376 || TREE_CODE (rhs) == BIT_AND_EXPR)
8377 && TREE_OPERAND (rhs, 0) == lhs
8378 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8379 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8380 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8382 rtx label = gen_label_rtx ();
8384 do_jump (TREE_OPERAND (rhs, 1),
8385 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8386 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8387 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8388 (TREE_CODE (rhs) == BIT_IOR_EXPR
8390 : integer_zero_node)),
8392 do_pending_stack_adjust ();
8397 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8398 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8399 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8400 TYPE_NONCOPIED_PARTS (lhs_type));
8402 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8403 while (noncopied_parts != 0)
8405 expand_assignment (TREE_PURPOSE (noncopied_parts),
8406 TREE_VALUE (noncopied_parts), 0, 0);
8407 noncopied_parts = TREE_CHAIN (noncopied_parts);
8413 if (!TREE_OPERAND (exp, 0))
8414 expand_null_return ();
8416 expand_return (TREE_OPERAND (exp, 0));
8419 case PREINCREMENT_EXPR:
8420 case PREDECREMENT_EXPR:
8421 return expand_increment (exp, 0, ignore);
8423 case POSTINCREMENT_EXPR:
8424 case POSTDECREMENT_EXPR:
8425 /* Faster to treat as pre-increment if result is not used. */
8426 return expand_increment (exp, ! ignore, ignore);
8429 /* If nonzero, TEMP will be set to the address of something that might
8430 be a MEM corresponding to a stack slot. */
8433 /* Are we taking the address of a nested function? */
8434 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8435 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8436 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8437 && ! TREE_STATIC (exp))
8439 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8440 op0 = force_operand (op0, target);
8442 /* If we are taking the address of something erroneous, just
8444 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8448 /* We make sure to pass const0_rtx down if we came in with
8449 ignore set, to avoid running the cleanups twice. */
8450 op0 = expand_expr (TREE_OPERAND (exp, 0),
8451 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8452 (modifier == EXPAND_INITIALIZER
8453 ? modifier : EXPAND_CONST_ADDRESS));
8455 /* If we are going to ignore the result, OP0 will have been set
8456 to const0_rtx, so just return it. Don't get confused and
8457 think we are taking the address of the constant. */
8461 op0 = protect_from_queue (op0, 0);
8463 /* We would like the object in memory. If it is a constant, we can
8464 have it be statically allocated into memory. For a non-constant,
8465 we need to allocate some memory and store the value into it. */
8467 if (CONSTANT_P (op0))
8468 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8470 else if (GET_CODE (op0) == MEM)
8472 mark_temp_addr_taken (op0);
8473 temp = XEXP (op0, 0);
8476 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8477 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8478 || GET_CODE (op0) == PARALLEL)
8480 /* If this object is in a register (or a CONCAT or PARALLEL), we cannot take its address directly; copy it into a memory temporary first. */
8482 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8483 tree nt = build_qualified_type (inner_type,
8484 (TYPE_QUALS (inner_type)
8485 | TYPE_QUAL_CONST));
8486 rtx memloc = assign_temp (nt, 1, 1, 1);
8488 mark_temp_addr_taken (memloc);
8489 if (GET_CODE (op0) == PARALLEL)
8490 /* Handle calls that pass values in multiple non-contiguous
8491 locations. The Irix 6 ABI has examples of this. */
8492 emit_group_store (memloc, op0,
8493 int_size_in_bytes (inner_type),
8494 TYPE_ALIGN (inner_type));
8496 emit_move_insn (memloc, op0);
8500 if (GET_CODE (op0) != MEM)
8503 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8505 temp = XEXP (op0, 0);
8506 #ifdef POINTERS_EXTEND_UNSIGNED
8507 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8508 && mode == ptr_mode)
8509 temp = convert_memory_address (ptr_mode, temp);
8514 op0 = force_operand (XEXP (op0, 0), target);
8517 if (flag_force_addr && GET_CODE (op0) != REG)
8518 op0 = force_reg (Pmode, op0);
8520 if (GET_CODE (op0) == REG
8521 && ! REG_USERVAR_P (op0))
8522 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8524 /* If we might have had a temp slot, add an equivalent address for it. */
8527 update_temp_slot_address (temp, op0);
8529 #ifdef POINTERS_EXTEND_UNSIGNED
8530 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8531 && mode == ptr_mode)
8532 op0 = convert_memory_address (ptr_mode, op0);
8537 case ENTRY_VALUE_EXPR:
8540 /* COMPLEX type for Extended Pascal & Fortran */
8543 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8546 /* Get the rtx code of the operands. */
8547 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8548 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8551 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8555 /* Move the real (op0) and imaginary (op1) parts to their location. */
8556 emit_move_insn (gen_realpart (mode, target), op0);
8557 emit_move_insn (gen_imagpart (mode, target), op1);
8559 insns = get_insns ();
8562 /* Complex construction should appear as a single unit. */
8563 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8564 each with a separate pseudo as destination.
8565 It's not correct for flow to treat them as a unit. */
8566 if (GET_CODE (target) != CONCAT)
8567 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8575 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8576 return gen_realpart (mode, op0);
8579 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8580 return gen_imagpart (mode, op0);
8584 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8588 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8591 target = gen_reg_rtx (mode);
8595 /* Store the realpart and the negated imagpart to target. */
8596 emit_move_insn (gen_realpart (partmode, target),
8597 gen_realpart (partmode, op0));
8599 imag_t = gen_imagpart (partmode, target);
8600 temp = expand_unop (partmode,
8601 ! unsignedp && flag_trapv
8602 && (GET_MODE_CLASS(partmode) == MODE_INT)
8603 ? negv_optab : neg_optab,
8604 gen_imagpart (partmode, op0), imag_t, 0);
8606 emit_move_insn (imag_t, temp);
8608 insns = get_insns ();
8611 /* Conjugate should appear as a single unit.
8612 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8613 each with a separate pseudo as destination.
8614 It's not correct for flow to treat them as a unit. */
8615 if (GET_CODE (target) != CONCAT)
8616 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8623 case TRY_CATCH_EXPR:
8625 tree handler = TREE_OPERAND (exp, 1);
8627 expand_eh_region_start ();
8629 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8631 expand_eh_region_end (handler);
8636 case TRY_FINALLY_EXPR:
8638 tree try_block = TREE_OPERAND (exp, 0);
8639 tree finally_block = TREE_OPERAND (exp, 1);
8640 rtx finally_label = gen_label_rtx ();
8641 rtx done_label = gen_label_rtx ();
8642 rtx return_link = gen_reg_rtx (Pmode);
8643 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8644 (tree) finally_label, (tree) return_link);
8645 TREE_SIDE_EFFECTS (cleanup) = 1;
8647 /* Start a new binding layer that will keep track of all cleanup
8648 actions to be performed. */
8649 expand_start_bindings (2);
8651 target_temp_slot_level = temp_slot_level;
8653 expand_decl_cleanup (NULL_TREE, cleanup);
8654 op0 = expand_expr (try_block, target, tmode, modifier);
8656 preserve_temp_slots (op0);
8657 expand_end_bindings (NULL_TREE, 0, 0);
8658 emit_jump (done_label);
8659 emit_label (finally_label);
8660 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8661 emit_indirect_jump (return_link);
8662 emit_label (done_label);
8666 case GOTO_SUBROUTINE_EXPR:
8668 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8669 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8670 rtx return_address = gen_label_rtx ();
8671 emit_move_insn (return_link,
8672 gen_rtx_LABEL_REF (Pmode, return_address));
8674 emit_label (return_address);
8680 rtx dcc = get_dynamic_cleanup_chain ();
8681 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8687 rtx dhc = get_dynamic_handler_chain ();
8688 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8693 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8696 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8699 /* Here to do an ordinary binary operator, generating an instruction
8700 from the optab already placed in `this_optab'. */
8702 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8705 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8707 temp = expand_binop (mode, this_optab, op0, op1, target,
8708 unsignedp, OPTAB_LIB_WIDEN);
8714 /* Similar to expand_expr, except that we don't specify a target, target
8715 mode, or modifier and we return the alignment of the inner type. This is
8716 used in cases where it is not necessary to align the result to the
8717 alignment of its type as long as we know the alignment of the result, for
8718 example for comparisons of BLKmode values. */
8721 expand_expr_unaligned (exp, palign)
8723 unsigned int *palign;
8726 tree type = TREE_TYPE (exp);
8727 register enum machine_mode mode = TYPE_MODE (type);
8729 /* Default the alignment we return to that of the type. */
8730 *palign = TYPE_ALIGN (type);
8732 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8734 if (mode != BLKmode)
8735 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8737 switch (TREE_CODE (exp))
8741 case NON_LVALUE_EXPR:
8742 /* Conversions between BLKmode values don't change the underlying
8743 alignment or value. */
8744 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8745 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8749 /* Much of the code for this case is copied directly from expand_expr.
8750 We need to duplicate it here because we will do something different
8751 in the fall-through case, so we need to handle the same exceptions it does. */
8754 tree array = TREE_OPERAND (exp, 0);
8755 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8756 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8757 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8760 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8763 /* Optimize the special-case of a zero lower bound.
8765 We convert the low_bound to sizetype to avoid some problems
8766 with constant folding. (E.g. suppose the lower bound is 1,
8767 and its mode is QI. Without the conversion, (ARRAY
8768 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8769 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8771 if (! integer_zerop (low_bound))
8772 index = size_diffop (index, convert (sizetype, low_bound));
8774 /* If this is a constant index into a constant array,
8775 just get the value from the array. Handle both the cases when
8776 we have an explicit constructor and when our operand is a variable
8777 that was declared const. */
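/* Illustration (hypothetical): given

     static const int tbl[3] = { 10, 20, 30 };

   a reference tbl[1] with its constant index can be folded here, so
   the initializer element 20 is expanded directly and the array is
   never addressed.  */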
8779 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8780 && host_integerp (index, 0)
8781 && 0 > compare_tree_int (index,
8782 list_length (CONSTRUCTOR_ELTS
8783 (TREE_OPERAND (exp, 0)))))
8787 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8788 i = tree_low_cst (index, 0);
8789 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8793 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8796 else if (optimize >= 1
8797 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8798 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8799 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8801 if (TREE_CODE (index) == INTEGER_CST)
8803 tree init = DECL_INITIAL (array);
8805 if (TREE_CODE (init) == CONSTRUCTOR)
8809 for (elem = CONSTRUCTOR_ELTS (init);
8810 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8811 elem = TREE_CHAIN (elem))
8815 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8825 /* If the operand is a CONSTRUCTOR, we can just extract the
8826 appropriate field if it is present. Don't do this if we have
8827 already written the data since we want to refer to that copy
8828 and varasm.c assumes that's what we'll do. */
8829 if (TREE_CODE (exp) != ARRAY_REF
8830 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8831 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8835 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8836 elt = TREE_CHAIN (elt))
8837 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8838 /* Note that unlike the case in expand_expr, we know this is
8839 BLKmode and hence not an integer. */
8840 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8844 enum machine_mode mode1;
8845 HOST_WIDE_INT bitsize, bitpos;
8848 unsigned int alignment;
8850 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8851 &mode1, &unsignedp, &volatilep,
8854 /* If we got back the original object, something is wrong. Perhaps
8855 we are evaluating an expression too early. In any event, don't
8856 infinitely recurse. */
8860 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8862 /* If this is a constant, put it into a register if it is a
8863 legitimate constant and OFFSET is 0; otherwise put it into memory. */
8864 if (CONSTANT_P (op0))
8866 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8868 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8870 op0 = force_reg (inner_mode, op0);
8872 op0 = validize_mem (force_const_mem (inner_mode, op0));
8877 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8879 /* If this object is in a register, put it into memory.
8880 This case can't occur in C, but can in Ada if we have
8881 unchecked conversion of an expression from a scalar type to
8882 an array or record type. */
8883 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8884 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8886 tree nt = build_qualified_type (TREE_TYPE (tem),
8887 (TYPE_QUALS (TREE_TYPE (tem))
8888 | TYPE_QUAL_CONST));
8889 rtx memloc = assign_temp (nt, 1, 1, 1);
8891 mark_temp_addr_taken (memloc);
8892 emit_move_insn (memloc, op0);
8896 if (GET_CODE (op0) != MEM)
8899 if (GET_MODE (offset_rtx) != ptr_mode)
8901 #ifdef POINTERS_EXTEND_UNSIGNED
8902 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8904 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8908 op0 = change_address (op0, VOIDmode,
8909 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8910 force_reg (ptr_mode,
8914 /* Don't forget about volatility even if this is a bitfield. */
8915 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8917 op0 = copy_rtx (op0);
8918 MEM_VOLATILE_P (op0) = 1;
8921 /* Check the access. */
8922 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8927 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8928 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8930 /* Check the access right of the pointer. */
8931 in_check_memory_usage = 1;
8932 if (size > BITS_PER_UNIT)
8933 emit_library_call (chkr_check_addr_libfunc,
8934 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8935 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8936 TYPE_MODE (sizetype),
8937 GEN_INT (MEMORY_USE_RO),
8938 TYPE_MODE (integer_type_node));
8939 in_check_memory_usage = 0;
8942 /* In cases where an aligned union has an unaligned object
8943 as a field, we might be extracting a BLKmode value from
8944 an integer-mode (e.g., SImode) object. Handle this case
8945 by doing the extract into an object as wide as the field
8946 (which we know to be the width of a basic mode), then
8947 storing into memory, and changing the mode to BLKmode.
8948 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8949 EXPAND_INITIALIZER), then we must not copy to a temporary. */
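/* A sketch of the situation with hypothetical types:

     union u { int i; struct { char c[3]; } s; };

   Fetching the BLKmode field S out of an SImode copy of the union
   means extracting the bits into an SImode temporary, storing that
   temporary into memory, and relabeling the resulting MEM as
   BLKmode.  */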
8950 if (mode1 == VOIDmode
8951 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8952 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8953 && (TYPE_ALIGN (type) > alignment
8954 || bitpos % TYPE_ALIGN (type) != 0)))
8956 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8958 if (ext_mode == BLKmode)
8960 /* In this case, BITPOS must start at a byte boundary. */
8961 if (GET_CODE (op0) != MEM
8962 || bitpos % BITS_PER_UNIT != 0)
8965 op0 = change_address (op0, VOIDmode,
8966 plus_constant (XEXP (op0, 0),
8967 bitpos / BITS_PER_UNIT));
8971 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8973 rtx new = assign_temp (nt, 0, 1, 1);
8975 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8976 unsignedp, NULL_RTX, ext_mode,
8977 ext_mode, alignment,
8978 int_size_in_bytes (TREE_TYPE (tem)));
8980 /* If the result is a record type and BITSIZE is narrower than
8981 the mode of OP0, an integral mode, and this is a big endian
8982 machine, we must put the field into the high-order bits. */
8983 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8984 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8985 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8986 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8987 size_int (GET_MODE_BITSIZE
8992 emit_move_insn (new, op0);
8993 op0 = copy_rtx (new);
8994 PUT_MODE (op0, BLKmode);
8998 /* Get a reference to just this component. */
8999 op0 = change_address (op0, mode1,
9000 plus_constant (XEXP (op0, 0),
9001 (bitpos / BITS_PER_UNIT)));
9003 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9005 /* Adjust the alignment in case the bit position is not
9006 a multiple of the alignment of the inner object. */
9007 while (bitpos % alignment != 0)
9010 if (GET_CODE (XEXP (op0, 0)) == REG)
9011 mark_reg_pointer (XEXP (op0, 0), alignment);
9013 MEM_IN_STRUCT_P (op0) = 1;
9014 MEM_VOLATILE_P (op0) |= volatilep;
9016 *palign = alignment;
9025 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9028 /* Return the tree node if ARG corresponds to a string constant, or zero
9029 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9030 in bytes within the string that ARG is accessing. The type of the
9031 offset will be `sizetype'. */
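/* For example, illustratively: for the C expression

     "hello" + 2

   ARG is a PLUS_EXPR of the string's address and 2, so the
   STRING_CST for "hello" is returned and *PTR_OFFSET is set to a
   sizetype 2; for the plain address of "hello" the offset is
   size_zero_node.  */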
9034 string_constant (arg, ptr_offset)
9040 if (TREE_CODE (arg) == ADDR_EXPR
9041 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9043 *ptr_offset = size_zero_node;
9044 return TREE_OPERAND (arg, 0);
9046 else if (TREE_CODE (arg) == PLUS_EXPR)
9048 tree arg0 = TREE_OPERAND (arg, 0);
9049 tree arg1 = TREE_OPERAND (arg, 1);
9054 if (TREE_CODE (arg0) == ADDR_EXPR
9055 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9057 *ptr_offset = convert (sizetype, arg1);
9058 return TREE_OPERAND (arg0, 0);
9060 else if (TREE_CODE (arg1) == ADDR_EXPR
9061 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9063 *ptr_offset = convert (sizetype, arg0);
9064 return TREE_OPERAND (arg1, 0);
9071 /* Expand code for a post- or pre-increment or decrement
9072 and return the RTX for the result.
9073 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9076 expand_increment (exp, post, ignore)
9080 register rtx op0, op1;
9081 register rtx temp, value;
9082 register tree incremented = TREE_OPERAND (exp, 0);
9083 optab this_optab = add_optab;
9085 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9086 int op0_is_copy = 0;
9087 int single_insn = 0;
9088 /* 1 means we can't store into OP0 directly,
9089 because it is a subreg narrower than a word,
9090 and we don't dare clobber the rest of the word. */
9093 /* Stabilize any component ref that might need to be
9094 evaluated more than once below. */
9096 || TREE_CODE (incremented) == BIT_FIELD_REF
9097 || (TREE_CODE (incremented) == COMPONENT_REF
9098 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9099 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9100 incremented = stabilize_reference (incremented);
9101 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9102 ones into save exprs so that they don't accidentally get evaluated
9103 more than once by the code below. */
9104 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9105 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9106 incremented = save_expr (incremented);
9108 /* Compute the operands as RTX.
9109 Note whether OP0 is the actual lvalue or a copy of it:
9110 I believe it is a copy iff it is a register or subreg
9111 and insns were generated in computing it. */
9113 temp = get_last_insn ();
9114 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9116 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9117 in place but instead must do sign- or zero-extension during assignment,
9118 so we copy it into a new register and let the code below use it as
9121 Note that we can safely modify this SUBREG since it is known not to be
9122 shared (it was made by the expand_expr call above). */
9124 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9127 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9131 else if (GET_CODE (op0) == SUBREG
9132 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9134 /* We cannot increment this SUBREG in place. If we are
9135 post-incrementing, get a copy of the old value. Otherwise,
9136 just mark that we cannot increment in place. */
9138 op0 = copy_to_reg (op0);
9143 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9144 && temp != get_last_insn ());
9145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9146 EXPAND_MEMORY_USE_BAD);
9148 /* Decide whether incrementing or decrementing. */
9149 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9150 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9151 this_optab = sub_optab;
9153 /* Convert decrement by a constant into a negative increment. */
9154 if (this_optab == sub_optab
9155 && GET_CODE (op1) == CONST_INT)
9157 op1 = GEN_INT (-INTVAL (op1));
9158 this_optab = add_optab;
9161 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9162 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9164 /* For a preincrement, see if we can do this with a single instruction. */
9167 icode = (int) this_optab->handlers[(int) mode].insn_code;
9168 if (icode != (int) CODE_FOR_nothing
9169 /* Make sure that OP0 is valid for operands 0 and 1
9170 of the insn we want to queue. */
9171 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9172 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9173 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9177 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9178 then we cannot just increment OP0. We must therefore contrive to
9179 increment the original value. Then, for postincrement, we can return
9180 OP0 since it is a copy of the old value. For preincrement, expand here
9181 unless we can do it with a single insn.
9183 Likewise if storing directly into OP0 would clobber high bits
9184 we need to preserve (bad_subreg). */
9185 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9187 /* This is the easiest way to increment the value wherever it is.
9188 Problems with multiple evaluation of INCREMENTED are prevented
9189 because either (1) it is a component_ref or preincrement,
9190 in which case it was stabilized above, or (2) it is an array_ref
9191 with constant index in an array in a register, which is
9192 safe to reevaluate. */
9193 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9194 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9195 ? MINUS_EXPR : PLUS_EXPR),
9198 TREE_OPERAND (exp, 1));
9200 while (TREE_CODE (incremented) == NOP_EXPR
9201 || TREE_CODE (incremented) == CONVERT_EXPR)
9203 newexp = convert (TREE_TYPE (incremented), newexp);
9204 incremented = TREE_OPERAND (incremented, 0);
9207 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9208 return post ? op0 : temp;
9213 /* We have a true reference to the value in OP0.
9214 If there is an insn to add or subtract in this mode, queue it.
9215 Queueing the increment insn avoids the register shuffling
9216 that often results if we must increment now and first save
9217 the old value for subsequent use. */
9219 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9220 op0 = stabilize (op0);
9223 icode = (int) this_optab->handlers[(int) mode].insn_code;
9224 if (icode != (int) CODE_FOR_nothing
9225 /* Make sure that OP0 is valid for operands 0 and 1
9226 of the insn we want to queue. */
9227 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9228 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9230 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9231 op1 = force_reg (mode, op1);
9233 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9235 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9237 rtx addr = (general_operand (XEXP (op0, 0), mode)
9238 ? force_reg (Pmode, XEXP (op0, 0))
9239 : copy_to_reg (XEXP (op0, 0)));
9242 op0 = change_address (op0, VOIDmode, addr);
9243 temp = force_reg (GET_MODE (op0), op0);
9244 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9245 op1 = force_reg (mode, op1);
9247 /* The increment queue is LIFO, thus we have to `queue'
9248 the instructions in reverse order. */
9249 enqueue_insn (op0, gen_move_insn (op0, temp));
9250 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9255 /* Preincrement, or we can't increment with one simple insn. */
9257 /* Save a copy of the value before inc or dec, to return it later. */
9258 temp = value = copy_to_reg (op0);
9260 /* Arrange to return the incremented value. */
9261 /* Copy the rtx because expand_binop will protect from the queue,
9262 and the results of that would be invalid for us to return
9263 if our caller does emit_queue before using our result. */
9264 temp = copy_rtx (value = op0);
9266 /* Increment however we can. */
9267 op1 = expand_binop (mode, this_optab, value, op1,
9268 current_function_check_memory_usage ? NULL_RTX : op0,
9269 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9270 /* Make sure the value is stored into OP0. */
9272 emit_move_insn (op0, op1);
9277 /* At the start of a function, record that we have no previously-pushed
9278 arguments waiting to be popped. */
9281 init_pending_stack_adjust ()
9283 pending_stack_adjust = 0;
9286 /* When exiting from a function, if safe, clear out any pending stack adjust
9287 so the adjustment won't get done.
9289 Note, if the current function calls alloca, then it must have a
9290 frame pointer regardless of the value of flag_omit_frame_pointer. */
9293 clear_pending_stack_adjust ()
9295 #ifdef EXIT_IGNORE_STACK
9297 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9298 && EXIT_IGNORE_STACK
9299 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9300 && ! flag_inline_functions)
9302 stack_pointer_delta -= pending_stack_adjust,
9303 pending_stack_adjust = 0;
9308 /* Pop any previously-pushed arguments that have not been popped yet. */
9311 do_pending_stack_adjust ()
9313 if (inhibit_defer_pop == 0)
9315 if (pending_stack_adjust != 0)
9316 adjust_stack (GEN_INT (pending_stack_adjust));
9317 pending_stack_adjust = 0;
9321 /* Expand conditional expressions. */
9323 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9324 LABEL is an rtx of code CODE_LABEL, in this function. */
9328 jumpifnot (exp, label)
9332 do_jump (exp, label, NULL_RTX);
9335 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9342 do_jump (exp, NULL_RTX, label);
9345 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9346 the result is zero, or IF_TRUE_LABEL if the result is one.
9347 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9348 meaning fall through in that case.
9350 do_jump always does any pending stack adjust except when it does not
9351 actually perform a jump. An example where there is no jump
9352 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9354 This function is responsible for optimizing cases such as
9355 &&, || and comparison operators in EXP. */
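/* For instance, as a sketch: for

     if (a && b)
       f ();

   do_jump emits one conditional jump past f for the case !a and a
   second for !b; the value of a && b is never computed into a
   register.  */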
9358 do_jump (exp, if_false_label, if_true_label)
9360 rtx if_false_label, if_true_label;
9362 register enum tree_code code = TREE_CODE (exp);
9363 /* Some cases need to create a label to jump to
9364 in order to properly fall through.
9365 These cases set DROP_THROUGH_LABEL nonzero. */
9366 rtx drop_through_label = 0;
9370 enum machine_mode mode;
9372 #ifdef MAX_INTEGER_COMPUTATION_MODE
9373 check_max_integer_computation_mode (exp);
9384 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9390 /* This is not true with #pragma weak */
9392 /* The address of something can never be zero. */
9394 emit_jump (if_true_label);
9399 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9400 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9401 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9404 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9406 if ((TYPE_PRECISION (TREE_TYPE (exp))
9407 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9409 case NON_LVALUE_EXPR:
9410 case REFERENCE_EXPR:
9415 /* These cannot change zero->non-zero or vice versa. */
9416 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9419 case WITH_RECORD_EXPR:
9420 /* Put the object on the placeholder list, recurse through our first
9421 operand, and pop the list. */
9422 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9424 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9425 placeholder_list = TREE_CHAIN (placeholder_list);
9429 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9430 a test and can be longer if the test is eliminated. */
9432 /* Reduce to minus. */
9433 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9434 TREE_OPERAND (exp, 0),
9435 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9436 TREE_OPERAND (exp, 1))));
9437 /* Process as MINUS. */
9441 /* Non-zero iff operands of minus differ. */
9442 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9443 TREE_OPERAND (exp, 0),
9444 TREE_OPERAND (exp, 1)),
9445 NE, NE, if_false_label, if_true_label);
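/* Illustratively: a jump on the truth of (a - b) is thus done as the
   comparison a != b, and a jump on (a + b) as a != -b, so neither
   case needs the arithmetic result itself.  */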
9449 /* If we are AND'ing with a small constant, do this comparison in the
9450 smallest type that fits. If the machine doesn't have comparisons
9451 that small, it will be converted back to the wider comparison.
9452 This helps if we are testing the sign bit of a narrower object.
9453 combine can't do this for us because it can't know whether a
9454 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
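/* Hypothetical example: on a target with QImode compares,

     if (x & 0x80)

   with X an int can be tested as a QImode compare of the low byte
   (the sign bit of the narrower object) instead of an SImode AND
   followed by a full-width compare.  */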
9456 if (! SLOW_BYTE_ACCESS
9457 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9458 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9459 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9460 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9461 && (type = type_for_mode (mode, 1)) != 0
9462 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9463 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9464 != CODE_FOR_nothing))
9466 do_jump (convert (type, exp), if_false_label, if_true_label);
9471 case TRUTH_NOT_EXPR:
9472 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9475 case TRUTH_ANDIF_EXPR:
9476 if (if_false_label == 0)
9477 if_false_label = drop_through_label = gen_label_rtx ();
9478 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9479 start_cleanup_deferral ();
9480 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9481 end_cleanup_deferral ();
9484 case TRUTH_ORIF_EXPR:
9485 if (if_true_label == 0)
9486 if_true_label = drop_through_label = gen_label_rtx ();
9487 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9488 start_cleanup_deferral ();
9489 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9490 end_cleanup_deferral ();
9495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9496 preserve_temp_slots (NULL_RTX);
9500 do_pending_stack_adjust ();
9501 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9508 HOST_WIDE_INT bitsize, bitpos;
9510 enum machine_mode mode;
9514 unsigned int alignment;
9516 /* Get description of this reference. We don't actually care
9517 about the underlying object here. */
9518 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9519 &unsignedp, &volatilep, &alignment);
9521 type = type_for_size (bitsize, unsignedp);
9522 if (! SLOW_BYTE_ACCESS
9523 && type != 0 && bitsize >= 0
9524 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9525 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9526 != CODE_FOR_nothing))
9528 do_jump (convert (type, exp), if_false_label, if_true_label);
9535 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9536 if (integer_onep (TREE_OPERAND (exp, 1))
9537 && integer_zerop (TREE_OPERAND (exp, 2)))
9538 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9540 else if (integer_zerop (TREE_OPERAND (exp, 1))
9541 && integer_onep (TREE_OPERAND (exp, 2)))
9542 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9546 register rtx label1 = gen_label_rtx ();
9547 drop_through_label = gen_label_rtx ();
9549 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9551 start_cleanup_deferral ();
9552 /* Now the THEN-expression. */
9553 do_jump (TREE_OPERAND (exp, 1),
9554 if_false_label ? if_false_label : drop_through_label,
9555 if_true_label ? if_true_label : drop_through_label);
9556 /* In case the do_jump just above never jumps. */
9557 do_pending_stack_adjust ();
9558 emit_label (label1);
9560 /* Now the ELSE-expression. */
9561 do_jump (TREE_OPERAND (exp, 2),
9562 if_false_label ? if_false_label : drop_through_label,
9563 if_true_label ? if_true_label : drop_through_label);
9564 end_cleanup_deferral ();
9570 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9572 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9573 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9575 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9576 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9579 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9580 fold (build (EQ_EXPR, TREE_TYPE (exp),
9581 fold (build1 (REALPART_EXPR,
9582 TREE_TYPE (inner_type),
9584 fold (build1 (REALPART_EXPR,
9585 TREE_TYPE (inner_type),
9587 fold (build (EQ_EXPR, TREE_TYPE (exp),
9588 fold (build1 (IMAGPART_EXPR,
9589 TREE_TYPE (inner_type),
9591 fold (build1 (IMAGPART_EXPR,
9592 TREE_TYPE (inner_type),
9594 if_false_label, if_true_label);
9597 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9598 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9600 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9601 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9602 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9604 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
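/* In effect, illustratively, the complex case above tests

     real (a) == real (b) && imag (a) == imag (b)

   by rebuilding the comparison from REALPART_EXPR and IMAGPART_EXPR
   pieces and handing it back to do_jump.  */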
9610 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9612 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9613 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9615 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9616 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9619 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9620 fold (build (NE_EXPR, TREE_TYPE (exp),
9621 fold (build1 (REALPART_EXPR,
9622 TREE_TYPE (inner_type),
9624 fold (build1 (REALPART_EXPR,
9625 TREE_TYPE (inner_type),
9627 fold (build (NE_EXPR, TREE_TYPE (exp),
9628 fold (build1 (IMAGPART_EXPR,
9629 TREE_TYPE (inner_type),
9631 fold (build1 (IMAGPART_EXPR,
9632 TREE_TYPE (inner_type),
9634 if_false_label, if_true_label);
9637 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9638 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9640 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9641 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9642 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9644 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9649 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9650 if (GET_MODE_CLASS (mode) == MODE_INT
9651 && ! can_compare_p (LT, mode, ccp_jump))
9652 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9654 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9658 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9659 if (GET_MODE_CLASS (mode) == MODE_INT
9660 && ! can_compare_p (LE, mode, ccp_jump))
9661 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9663 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9667 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9668 if (GET_MODE_CLASS (mode) == MODE_INT
9669 && ! can_compare_p (GT, mode, ccp_jump))
9670 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9672 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9676 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677 if (GET_MODE_CLASS (mode) == MODE_INT
9678 && ! can_compare_p (GE, mode, ccp_jump))
9679 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9681 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9684 case UNORDERED_EXPR:
9687 enum rtx_code cmp, rcmp;
9690 if (code == UNORDERED_EXPR)
9691 cmp = UNORDERED, rcmp = ORDERED;
9693 cmp = ORDERED, rcmp = UNORDERED;
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9697 if (! can_compare_p (cmp, mode, ccp_jump)
9698 && (can_compare_p (rcmp, mode, ccp_jump)
9699 /* If the target doesn't provide either UNORDERED or ORDERED
9700 comparisons, canonicalize on UNORDERED for the library. */
9701 || rcmp == UNORDERED))
9705 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9707 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9712 enum rtx_code rcode1;
9713 enum tree_code tcode2;
9737 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9738 if (can_compare_p (rcode1, mode, ccp_jump))
9739 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9743 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9744 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9747 /* If the target doesn't support combined unordered
9748 compares, decompose into UNORDERED + comparison. */
9749 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9750 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9751 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9752 do_jump (exp, if_false_label, if_true_label);
9759 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9761 /* This is not needed any more and causes poor code since it causes
9762 comparisons and tests from non-SI objects to have different code patterns. */
9764 /* Copy to register to avoid generating bad insns by cse
9765 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9766 if (!cse_not_expected && GET_CODE (temp) == MEM)
9767 temp = copy_to_reg (temp);
9769 do_pending_stack_adjust ();
9770 /* Do any postincrements in the expression that was tested. */
9773 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9775 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9779 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9780 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9781 /* Note swapping the labels gives us not-equal. */
9782 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9783 else if (GET_MODE (temp) != VOIDmode)
9784 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9785 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9786 GET_MODE (temp), NULL_RTX, 0,
9787 if_false_label, if_true_label);
9792 if (drop_through_label)
9794 /* If do_jump produces code that might be jumped around,
9795 do any stack adjusts from that code, before the place
9796 where control merges in. */
9797 do_pending_stack_adjust ();
9798 emit_label (drop_through_label);
9802 /* Given a comparison expression EXP for values too wide to be compared
9803 with one insn, test the comparison and jump to the appropriate label.
9804 The code of EXP is ignored; we always test GT if SWAP is 0,
9805 and LT if SWAP is 1. */
9808 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9811 rtx if_false_label, if_true_label;
9813 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9814 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9815 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9816 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9818 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9821 /* Compare OP0 with OP1, word at a time, in mode MODE.
9822 UNSIGNEDP says to do unsigned comparison.
9823 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
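/* As a sketch, comparing two DImode values on a 32-bit target goes
   word by word, high-order word first:

     if (hi (op0) > hi (op1)) goto if_true_label;
     if (hi (op0) != hi (op1)) goto if_false_label;
     if (lo (op0) > lo (op1)) goto if_true_label;   (unsigned)
     goto if_false_label;

   Only the high-order word is compared with the caller's
   signedness.  */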
9826 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9827 enum machine_mode mode;
9830 rtx if_false_label, if_true_label;
9832 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9833 rtx drop_through_label = 0;
9836 if (! if_true_label || ! if_false_label)
9837 drop_through_label = gen_label_rtx ();
9838 if (! if_true_label)
9839 if_true_label = drop_through_label;
9840 if (! if_false_label)
9841 if_false_label = drop_through_label;
9843 /* Compare a word at a time, high order first. */
9844 for (i = 0; i < nwords; i++)
9846 rtx op0_word, op1_word;
9848 if (WORDS_BIG_ENDIAN)
9850 op0_word = operand_subword_force (op0, i, mode);
9851 op1_word = operand_subword_force (op1, i, mode);
9855 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9856 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9859 /* All but the high-order word must be compared as unsigned. */
9860 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9861 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9862 NULL_RTX, if_true_label);
9864 /* Consider lower words only if these are equal. */
9865 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9866 NULL_RTX, 0, NULL_RTX, if_false_label);
9870 emit_jump (if_false_label);
9871 if (drop_through_label)
9872 emit_label (drop_through_label);
9875 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9876 with one insn, test the comparison and jump to the appropriate label. */
9879 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9881 rtx if_false_label, if_true_label;
9883 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9884 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9885 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9886 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9888 rtx drop_through_label = 0;
9890 if (! if_false_label)
9891 drop_through_label = if_false_label = gen_label_rtx ();
9893 for (i = 0; i < nwords; i++)
9894 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9895 operand_subword_force (op1, i, mode),
9896 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9897 word_mode, NULL_RTX, 0, if_false_label,
9901 emit_jump (if_true_label);
9902 if (drop_through_label)
9903 emit_label (drop_through_label);
9906 /* Jump according to whether OP0 is 0.
9907 We assume that OP0 has an integer mode that is too wide
9908 for the available compare insns. */
9911 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9913 rtx if_false_label, if_true_label;
9915 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9918 rtx drop_through_label = 0;
9920 /* The fastest way of doing this comparison on almost any machine is to
9921 "or" all the words and compare the result. If all have to be loaded
9922 from memory and this is a very wide item, it's possible this may
9923 be slower, but that's highly unlikely. */
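/* Sketch for a DImode OP0 on a 32-bit target:

     part = hi (op0) | lo (op0);
     if (part == 0) goto if_true_label; else goto if_false_label;

   One IOR and a single word-sized compare replace two separate
   compare-and-jump pairs.  */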
9925 part = gen_reg_rtx (word_mode);
9926 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9927 for (i = 1; i < nwords && part != 0; i++)
9928 part = expand_binop (word_mode, ior_optab, part,
9929 operand_subword_force (op0, i, GET_MODE (op0)),
9930 part, 1, OPTAB_WIDEN);
9934 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9935 NULL_RTX, 0, if_false_label, if_true_label);
9940 /* If we couldn't do the "or" simply, do this with a series of compares. */
9941 if (! if_false_label)
9942 drop_through_label = if_false_label = gen_label_rtx ();
9944 for (i = 0; i < nwords; i++)
9945 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9946 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9947 if_false_label, NULL_RTX);
9950 emit_jump (if_true_label);
9952 if (drop_through_label)
9953 emit_label (drop_through_label);
9956 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9957 (including code to compute the values to be compared)
9958 and set (CC0) according to the result.
9959 The decision as to signed or unsigned comparison must be made by the caller.
9961 We force a stack adjustment unless there are currently
9962 things pushed on the stack that aren't yet used.
9964 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9967 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9968 size of MODE should be used. */
9971 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9972 register rtx op0, op1;
9975 enum machine_mode mode;
9981 /* If one operand is constant, make it the second one. Only do this
9982 if the other operand is not constant as well. */
9984 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9985 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9990 code = swap_condition (code);
9995 op0 = force_not_mem (op0);
9996 op1 = force_not_mem (op1);
9999 do_pending_stack_adjust ();
10001 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10002 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10006 /* There's no need to do this now that combine.c can eliminate lots of
10007 sign extensions. This can be less efficient in certain cases on other machines. */
10010 /* If this is a signed equality comparison, we can do it as an
10011 unsigned comparison since zero-extension is cheaper than sign
10012 extension and comparisons with zero are done as unsigned. This is
10013 the case even on machines that can do fast sign extension, since
10014 zero-extension is easier to combine with other operations than
10015 sign-extension is. If we are comparing against a constant, we must
10016 convert it to what it would look like unsigned. */
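/* E.g., illustratively: a signed QImode test X == -1 done unsigned
   masks the constant first, so the compare emitted is X == 0xff;
   the bits tested are the same either way.  */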
10017 if ((code == EQ || code == NE) && ! unsignedp
10018 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10020 if (GET_CODE (op1) == CONST_INT
10021 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10022 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10027 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10029 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10032 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10033 The decision as to signed or unsigned comparison must be made by the caller.
10035 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10038 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10039 size of MODE should be used. */
10042 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10043 if_false_label, if_true_label)
10044 register rtx op0, op1;
10045 enum rtx_code code;
10047 enum machine_mode mode;
10049 unsigned int align;
10050 rtx if_false_label, if_true_label;
10053 int dummy_true_label = 0;
10055 /* Reverse the comparison if that is safe and we want to jump if it is false. */
10057 if (! if_true_label && ! FLOAT_MODE_P (mode))
10059 if_true_label = if_false_label;
10060 if_false_label = 0;
10061 code = reverse_condition (code);
  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }
#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
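/* Illustrative sketch, not part of the compiler: branch to WHERE when
   two SImode pseudos are unequal, falling through otherwise.  Passing
   NULL_RTX as IF_FALSE_LABEL makes the false case drop through; the
   helper name is hypothetical.  */
#if 0
static void
example_branch_if_ne (a, b, where)
     rtx a, b, where;
{
  do_compare_rtx_and_jump (a, b, NE, 0, SImode, NULL_RTX, 0,
                           NULL_RTX, where);
}
#endif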
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;
  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;
#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
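/* Illustrative sketch, not part of the compiler: a caller expanding a
   less-than comparison supplies both the signed and the unsigned rtx
   code and lets the operand type pick between them.  The variable
   names here are hypothetical.  */
#if 0
  /* Jump to FALSE_LABEL when COND_EXP is false; fall through when true.
     LT is used if the operands are signed, LTU if they are unsigned.  */
  do_compare_and_jump (cond_exp, LT, LTU, false_label, NULL_RTX);
#endif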
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }
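  /* For example, with signed operands the switch above rewrites `x < 1'
     as `x <= 0' and `x > -1' as `x >= 0', so the tests that follow see
     a comparison against zero.  */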
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;
      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
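  /* For example, `(x & 8) != 0' is computed here as `(x >> 3) & 1', and
     `(x & 8) == 0' as `((x >> 3) ^ 1) & 1'; the AND disappears entirely
     when bit 3 happens to be the sign bit.  Neither form needs an scc
     instruction.  */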
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do it with a set/compare/jump/set sequence.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
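/* Illustrative sketch, not part of the compiler: for `target = (a < b)'
   the fallback above emits a sequence of this shape (shown for the
   non-inverted case):

        target = 1;
        compare a, b;
        if (a < b) goto label;
        target = 0;
     label:

   so TARGET holds 1 exactly when the comparison is true.  */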
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);
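  /* For example, for `switch' cases 5 through 12 the lowered index is
     x - 5 and RANGE is 7.  If x < 5 the subtraction wraps around to a
     huge unsigned value, so the single GTU test `(unsigned) (x - 5) > 7'
     rejects both x < 5 and x > 12 with one branch.  */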
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);
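  /* Illustrative example: with 4-byte table entries the address built
     above is (plus (mult index 4) (label_ref table_label)), i.e. the
     classic `base + index * entry_size' form; the load through VECTOR
     then fetches the target label for the selected case.  */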
  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */