/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#define STACK_PUSH_CODE PRE_INC

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));

extern struct obstack permanent_obstack;
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
                                PARAMS ((unsigned HOST_WIDE_INT,
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       struct store_by_pieces *));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int, tree,
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
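/* Worked example (added for illustration; the numbers are assumptions,
   not from this file): with MOVE_RATIO at its default of 15 and a
   16-byte, word-aligned copy on a target with 4-byte words,
   move_by_pieces_ninsns counts 16/4 = 4 SImode moves, so
   MOVE_BY_PIECES_P is true and emit_block_move below expands the copy
   inline instead of emitting a movstr pattern or a memcpy call.  */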
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
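/* Illustrative sketch (added; not part of the original source): when
   expanding a post-increment whose old value is still needed, the
   expander can do something like

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

   and then use Q in place of VAR wherever the pre-increment value is
   required; gen_add2_insn is the optabs helper used elsewhere in this
   file, and emit_queue eventually emits the queued addition.  */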
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

protect_from_queue (x, modify)
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
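/* Illustrative example of the full cycle (added; not from the
   original source):

     rtx q   = enqueue_insn (var, body);     queue VAR's increment
     rtx old = protect_from_queue (q, 0);    safe pre-increment value
     emit_queue ();                          the increment is emitted

   After emit_queue, OLD still holds the pre-increment value: it is
   either VAR copied into a pseudo (increment not yet emitted) or the
   QUEUED_COPY set up above (increment already emitted).  */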
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
          emit_unop_insn (code, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          libcall = extendsfdf2_libfunc;
          libcall = extendsfxf2_libfunc;
          libcall = extendsftf2_libfunc;
          libcall = truncdfsf2_libfunc;
          libcall = extenddfxf2_libfunc;
          libcall = extenddftf2_libfunc;
          libcall = truncxfsf2_libfunc;
          libcall = truncxfdf2_libfunc;
          libcall = trunctfsf2_libfunc;
          libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();

      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
              fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */  /* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
          from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
          emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
          emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
          from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
          emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
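/* Illustrative example (added; not from the original source): to widen
   a SImode pseudo into a DImode pseudo with sign extension:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   The final argument is UNSIGNEDP; passing 1 would request
   zero-extension instead of sign-extension.  */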
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);
  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
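      /* Worked example (added; not from the original source): on a host
         with 32-bit HOST_WIDE_INT, converting the CONST_INT -1 to an
         unsigned 64-bit mode must yield the double constant with low
         word 0xffffffff and high word 0, i.e. 4294967295, rather than
         the sign-extended pair that gen_lowpart would produce;
         immed_double_const with an explicit zero high word does this.  */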
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
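/* Worked example (added; not from the original source): converting a
   QImode constant with all eight bits set to HImode first masks VAL to
   0xff = 255; with UNSIGNEDP nonzero that is the result, while with
   UNSIGNEDP zero the top bit 0x80 triggers the sign-extension above
   and the result is -1.  */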
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
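/* Worked example (added; not from the original source): with
   MOVE_MAX == 8 and a fully aligned L of 11 bytes, the loop counts
   11/8 = 1 DImode move (3 bytes left), 3/4 = 0 SImode moves,
   3/2 = 1 HImode move (1 byte left) and finally 1 QImode move,
   for a total of 3 insns.  */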
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
            to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)

   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,

static GTY(()) tree block_move_fn;

emit_block_move (x, y, size)
#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;

  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
              rtx last = get_last_insn ();

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);

              delete_insns_since (last);
      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different
         conventions for returning pointers, we could end up generating
         incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (block_move_fn == NULL_TREE)
          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          block_move_fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
          DECL_EXTERNAL (block_move_fn) = 1;
          TREE_PUBLIC (block_move_fn) = 1;
          DECL_ARTIFICIAL (block_move_fn) = 1;
          TREE_NOTHROW (block_move_fn) = 1;
          make_decl_rtl (block_move_fn, NULL);
          assemble_external (block_move_fn);

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR,
                          build_pointer_type (TREE_TYPE (block_move_fn)),
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));

      /* If we are initializing a readonly value, show the above call
         clobbered it.  Otherwise, a load from it may erroneously be
         hoisted
      if (RTX_UNCHANGING_P (x))
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
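/* Illustrative example (added; not from the original source): copying a
   16-byte BLKmode aggregate from SRC_MEM to DST_MEM would be expanded
   with

     emit_block_move (dst_mem, src_mem, GEN_INT (16));

   which tries move_by_pieces first, then a movstr pattern, and finally
   falls back to the memcpy/bcopy call built above.  */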
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
      delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)
#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
      delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING

emit_group_load (dst, orig_src, ssize)
  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
               && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
              || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
              tmps[i] = XEXP (src, bytepos != 0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             0, 1, NULL_RTX, mode, mode, ssize);
      else if (bytepos == 0)
          rtx mem = assign_stack_temp (GET_MODE (src),
                                       GET_MODE_SIZE (GET_MODE (src)), 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, 0);
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
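/* Illustrative example (added; not from the original source): a
   16-byte structure returned in two DImode registers might be
   described by

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each const_int is the piece's byte offset within the block;
   the register numbers here are made up.  */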
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

emit_group_store (orig_dst, src, ssize)
  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          if (BYTES_BIG_ENDIAN)
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
          bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
                                                  | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
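  /* Worked example (added; not from the original source): with 32-bit
     words and a 3-byte structure, bytes % UNITS_PER_WORD == 3, so the
     correction is 32 - 3*8 = 8 bits: on a big-endian machine the data
     occupies the low-order 24 bits of the register and the copy loop
     below starts extracting 8 bits in.  */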
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
/* Use xbitpos for the source extraction (right justified) and
bitpos for the destination store (left justified).  */
2259 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2260 extract_bit_field (src, bitsize,
2261 xbitpos % BITS_PER_WORD, 1,
2262 NULL_RTX, word_mode, word_mode,
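/* Worked example (illustrative): returning a 6-byte structure on a
32-bit big-endian target gives big_endian_correction = 32 - 16 = 16,
so the first extraction above starts 16 bits into the first source
word, skipping the unused high-order bytes, while the stores into
TGTBLK start at bit 0.  */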
2270 /* Add a USE expression for REG to the (possibly empty) list pointed
2271 to by CALL_FUSAGE. REG must denote a hard register. */
2274 use_reg (call_fusage, reg)
2275 rtx *call_fusage, reg;
2277 if (GET_CODE (reg) != REG
2278 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2282 = gen_rtx_EXPR_LIST (VOIDmode,
2283 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2286 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2287 starting at REGNO. All of these registers must be hard registers. */
2290 use_regs (call_fusage, regno, nregs)
2297 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2300 for (i = 0; i < nregs; i++)
2301 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2304 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2305 PARALLEL REGS. This is for calls that pass values in multiple
2306 non-contiguous locations. The Irix 6 ABI has examples of this. */
2309 use_group_regs (call_fusage, regs)
2315 for (i = 0; i < XVECLEN (regs, 0); i++)
2317 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2319 /* A NULL entry means the parameter goes both on the stack and in
2320 registers. This can also be a MEM for targets that pass values
2321 partially on the stack and partially in registers. */
2322 if (reg != 0 && GET_CODE (reg) == REG)
2323 use_reg (call_fusage, reg);
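/* For instance, two successive use_reg calls leave *CALL_FUSAGE as a
chain like (illustrative):

  (expr_list (use (reg:SI 4))
     (expr_list (use (reg:SI 3))
        (nil)))  */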
2328 /* Determine whether the LEN bytes generated by CONSTFUN can be
2329 stored to memory using several move instructions. CONSTFUNDATA is
2330 a pointer which will be passed as argument in every CONSTFUN call.
2331 ALIGN is maximum alignment we can assume. Return nonzero if a
2332 call to store_by_pieces should succeed. */
2335 can_store_by_pieces (len, constfun, constfundata, align)
2336 unsigned HOST_WIDE_INT len;
2337 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2341 unsigned HOST_WIDE_INT max_size, l;
2342 HOST_WIDE_INT offset = 0;
2343 enum machine_mode mode, tmode;
2344 enum insn_code icode;
2348 if (! MOVE_BY_PIECES_P (len, align))
2351 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2352 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2353 align = MOVE_MAX * BITS_PER_UNIT;
2355 /* We would first store what we can in the largest integer mode, then go to
2356 successively smaller modes. */
2359 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2364 max_size = STORE_MAX_PIECES + 1;
2365 while (max_size > 1)
2367 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2368 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2369 if (GET_MODE_SIZE (tmode) < max_size)
2372 if (mode == VOIDmode)
2375 icode = mov_optab->handlers[(int) mode].insn_code;
2376 if (icode != CODE_FOR_nothing
2377 && align >= GET_MODE_ALIGNMENT (mode))
2379 unsigned int size = GET_MODE_SIZE (mode);
2386 cst = (*constfun) (constfundata, offset, mode);
2387 if (!LEGITIMATE_CONSTANT_P (cst))
2397 max_size = GET_MODE_SIZE (mode);
2400 /* The code above should have handled everything. */
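/* Illustrative walk-through: for LEN == 11, assuming
STORE_MAX_PIECES == 4 and full alignment, the loop above tries SImode
chunks at offsets 0 and 4, an HImode chunk at offset 8 and a QImode
chunk at offset 10, calling CONSTFUN for each piece and failing if any
returned constant is not LEGITIMATE_CONSTANT_P.  */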
2408 /* Generate several move instructions to store LEN bytes generated by
2409 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2410 pointer which will be passed as argument in every CONSTFUN call.
2411 ALIGN is maximum alignment we can assume. */
2414 store_by_pieces (to, len, constfun, constfundata, align)
2416 unsigned HOST_WIDE_INT len;
2417 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2421 struct store_by_pieces data;
2423 if (! MOVE_BY_PIECES_P (len, align))
2425 to = protect_from_queue (to, 1);
2426 data.constfun = constfun;
2427 data.constfundata = constfundata;
2430 store_by_pieces_1 (&data, align);
2433 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2434 rtx with BLKmode). The caller must pass TO through protect_from_queue
2435 before calling. ALIGN is maximum alignment we can assume. */
2438 clear_by_pieces (to, len, align)
2440 unsigned HOST_WIDE_INT len;
2443 struct store_by_pieces data;
2445 data.constfun = clear_by_pieces_1;
2446 data.constfundata = NULL;
2449 store_by_pieces_1 (&data, align);
2452 /* Callback routine for clear_by_pieces.
2453 Return const0_rtx unconditionally. */
2456 clear_by_pieces_1 (data, offset, mode)
2457 PTR data ATTRIBUTE_UNUSED;
2458 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2459 enum machine_mode mode ATTRIBUTE_UNUSED;
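/* Since every chunk's source constant is const0_rtx, an 8-byte clear
on a 32-bit target simply becomes two SImode stores of zero
(illustrative; the modes actually chosen depend on alignment and
STORE_MAX_PIECES).  */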
2464 /* Subroutine of clear_by_pieces and store_by_pieces.
2465 Generate several move instructions to store LEN bytes of block TO. (A MEM
2466 rtx with BLKmode). The caller must pass TO through protect_from_queue
2467 before calling. ALIGN is maximum alignment we can assume. */
2470 store_by_pieces_1 (data, align)
2471 struct store_by_pieces *data;
2474 rtx to_addr = XEXP (data->to, 0);
2475 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2476 enum machine_mode mode = VOIDmode, tmode;
2477 enum insn_code icode;
2480 data->to_addr = to_addr;
2482 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2483 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2485 data->explicit_inc_to = 0;
2487 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2489 data->offset = data->len;
2491 /* If storing requires more than two move insns,
2492 copy addresses to registers (to make displacements shorter)
2493 and use post-increment if available. */
2494 if (!data->autinc_to
2495 && move_by_pieces_ninsns (data->len, align) > 2)
2497 /* Determine the main mode we'll be using. */
2498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) < max_size)
2503 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2505 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2506 data->autinc_to = 1;
2507 data->explicit_inc_to = -1;
2510 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2511 && ! data->autinc_to)
2513 data->to_addr = copy_addr_to_reg (to_addr);
2514 data->autinc_to = 1;
2515 data->explicit_inc_to = 1;
if (!data->autinc_to && CONSTANT_P (to_addr))
2519 data->to_addr = copy_addr_to_reg (to_addr);
2522 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2523 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2524 align = MOVE_MAX * BITS_PER_UNIT;
2526 /* First store what we can in the largest integer mode, then go to
2527 successively smaller modes. */
2529 while (max_size > 1)
2531 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2532 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2533 if (GET_MODE_SIZE (tmode) < max_size)
2536 if (mode == VOIDmode)
2539 icode = mov_optab->handlers[(int) mode].insn_code;
2540 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2541 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2543 max_size = GET_MODE_SIZE (mode);
2546 /* The code above should have handled everything. */
2551 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2552 with move instructions for mode MODE. GENFUN is the gen_... function
2553 to make a move insn for that mode. DATA has all the other info. */
2556 store_by_pieces_2 (genfun, mode, data)
2557 rtx (*genfun) PARAMS ((rtx, ...));
2558 enum machine_mode mode;
2559 struct store_by_pieces *data;
2561 unsigned int size = GET_MODE_SIZE (mode);
2564 while (data->len >= size)
2567 data->offset -= size;
2569 if (data->autinc_to)
2570 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2573 to1 = adjust_address (data->to, mode, data->offset);
2575 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2576 emit_insn (gen_add2_insn (data->to_addr,
2577 GEN_INT (-(HOST_WIDE_INT) size)));
2579 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2580 emit_insn ((*genfun) (to1, cst));
2582 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2583 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2585 if (! data->reverse)
2586 data->offset += size;
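/* Illustrative sequence: with explicit_inc_to < 0 each chunk is
stored through a predecremented address register,

  (set (reg:SI addr) (plus:SI (reg:SI addr) (const_int -4)))
  (set (mem:SI (reg:SI addr)) (const_int 0))

whereas the non-autoincrement path above addresses each chunk at a
constant displacement from data->to_addr.  */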
2592 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2593 its length in bytes. */
2595 static GTY(()) tree block_clear_fn;
2597 clear_storage (object, size)
2601 #ifdef TARGET_MEM_FUNCTIONS
2602 tree call_expr, arg_list;
2605 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2606 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2608 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2609 just move a zero. Otherwise, do this a piece at a time. */
2610 if (GET_MODE (object) != BLKmode
2611 && GET_CODE (size) == CONST_INT
2612 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2613 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2616 object = protect_from_queue (object, 1);
2617 size = protect_from_queue (size, 0);
2619 if (GET_CODE (size) == CONST_INT
2620 && MOVE_BY_PIECES_P (INTVAL (size), align))
2621 clear_by_pieces (object, INTVAL (size), align);
2624 /* Try the most limited insn first, because there's no point
2625 including more than one in the machine description unless
2626 the more limited one has some advantage. */
2628 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2629 enum machine_mode mode;
2631 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2632 mode = GET_MODE_WIDER_MODE (mode))
2634 enum insn_code code = clrstr_optab[(int) mode];
2635 insn_operand_predicate_fn pred;
2637 if (code != CODE_FOR_nothing
2638 /* We don't need MODE to be narrower than
2639 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2640 the mode mask, as it is returned by the macro, it will
2641 definitely be less than the actual mode mask. */
2642 && ((GET_CODE (size) == CONST_INT
2643 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2644 <= (GET_MODE_MASK (mode) >> 1)))
2645 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2646 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2647 || (*pred) (object, BLKmode))
2648 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2649 || (*pred) (opalign, VOIDmode)))
2652 rtx last = get_last_insn ();
2655 op1 = convert_to_mode (mode, size, 1);
2656 pred = insn_data[(int) code].operand[1].predicate;
2657 if (pred != 0 && ! (*pred) (op1, mode))
2658 op1 = copy_to_mode_reg (mode, op1);
2660 pat = GEN_FCN ((int) code) (object, op1, opalign);
2667 delete_insns_since (last);
2671 /* OBJECT or SIZE may have been passed through protect_from_queue.
2673 It is unsafe to save the value generated by protect_from_queue
2674 and reuse it later. Consider what happens if emit_queue is
2675 called before the return value from protect_from_queue is used.
2677 Expansion of the CALL_EXPR below will call emit_queue before
2678 we are finished emitting RTL for argument setup. So if we are
2679 not careful we could get the wrong value for an argument.
2681 To avoid this problem we go ahead and emit code to copy OBJECT
2682 and SIZE into new pseudos. We can then place those new pseudos
2683 into an RTL_EXPR and use them later, even after a call to
2686 Note this is not strictly needed for library calls since they
2687 do not call emit_queue before loading their arguments. However,
2688 we may need to have library calls call emit_queue in the future
2689 since failing to do so could cause problems for targets which
2690 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2691 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2693 #ifdef TARGET_MEM_FUNCTIONS
2694 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2696 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2697 TREE_UNSIGNED (integer_type_node));
2698 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2701 #ifdef TARGET_MEM_FUNCTIONS
2702 /* It is incorrect to use the libcall calling conventions to call
2703 memset in this context.
2705 This could be a user call to memset and the user may wish to
2706 examine the return value from memset.
2708 For targets where libcalls and normal calls have different
2709 conventions for returning pointers, we could end up generating
2712 So instead of using a libcall sequence we build up a suitable
2713 CALL_EXPR and expand the call in the normal fashion. */
2714 if (block_clear_fn == NULL_TREE)
/* This was copied from except.c; I don't know whether all of it is
necessary in this context or not.  */
2720 block_clear_fn = get_identifier ("memset");
2721 fntype = build_pointer_type (void_type_node);
2722 fntype = build_function_type (fntype, NULL_TREE);
2723 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2725 DECL_EXTERNAL (block_clear_fn) = 1;
2726 TREE_PUBLIC (block_clear_fn) = 1;
2727 DECL_ARTIFICIAL (block_clear_fn) = 1;
2728 TREE_NOTHROW (block_clear_fn) = 1;
2729 make_decl_rtl (block_clear_fn, NULL);
2730 assemble_external (block_clear_fn);
/* We need to make an argument list for the function call.

memset has three arguments: the first is a void * address, the
second an integer with the initialization value, and the last a
size_t byte count for the copy.  */
2739 = build_tree_list (NULL_TREE,
2740 make_tree (build_pointer_type (void_type_node),
2742 TREE_CHAIN (arg_list)
2743 = build_tree_list (NULL_TREE,
2744 make_tree (integer_type_node, const0_rtx));
2745 TREE_CHAIN (TREE_CHAIN (arg_list))
2746 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2747 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2749 /* Now we have to build up the CALL_EXPR itself. */
2750 call_expr = build1 (ADDR_EXPR,
2751 build_pointer_type (TREE_TYPE (block_clear_fn)),
2753 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2754 call_expr, arg_list, NULL_TREE);
2755 TREE_SIDE_EFFECTS (call_expr) = 1;
2757 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2759 emit_library_call (bzero_libfunc, LCT_NORMAL,
2760 VOIDmode, 2, object, Pmode, size,
2761 TYPE_MODE (integer_type_node));
2764 /* If we are initializing a readonly value, show the above call
2765 clobbered it. Otherwise, a load from it may erroneously be
2766 hoisted from a loop. */
2767 if (RTX_UNCHANGING_P (object))
2768 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
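/* Typical use (illustrative): zeroing a 16-byte BLKmode object OBJ is

  clear_storage (obj, GEN_INT (16));

which, depending on the checks above, becomes clear_by_pieces, a
clrstr pattern, or a call to memset/bzero.  */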
2775 /* Generate code to copy Y into X.
2776 Both Y and X must have the same mode, except that
2777 Y can be a constant with VOIDmode.
2778 This mode cannot be BLKmode; use emit_block_move for that.
2780 Return the last instruction emitted. */
2783 emit_move_insn (x, y)
2786 enum machine_mode mode = GET_MODE (x);
2787 rtx y_cst = NULL_RTX;
2790 x = protect_from_queue (x, 1);
2791 y = protect_from_queue (y, 0);
2793 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2796 /* Never force constant_p_rtx to memory. */
2797 if (GET_CODE (y) == CONSTANT_P_RTX)
2799 else if (CONSTANT_P (y))
2802 && FLOAT_MODE_P (GET_MODE (x))
2803 && (last_insn = compress_float_constant (x, y)))
2806 if (!LEGITIMATE_CONSTANT_P (y))
2809 y = force_const_mem (mode, y);
/* If X or Y are memory references, verify that their addresses are valid
for the machine.  */
2815 if (GET_CODE (x) == MEM
2816 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2817 && ! push_operand (x, GET_MODE (x)))
2819 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2820 x = validize_mem (x);
2822 if (GET_CODE (y) == MEM
2823 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2825 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2826 y = validize_mem (y);
2828 if (mode == BLKmode)
2831 last_insn = emit_move_insn_1 (x, y);
2833 if (y_cst && GET_CODE (x) == REG)
2834 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2839 /* Low level part of emit_move_insn.
2840 Called just like emit_move_insn, but assumes X and Y
2841 are basically valid. */
2844 emit_move_insn_1 (x, y)
2847 enum machine_mode mode = GET_MODE (x);
2848 enum machine_mode submode;
2849 enum mode_class class = GET_MODE_CLASS (mode);
2851 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2854 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2856 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2858 /* Expand complex moves by moving real part and imag part, if possible. */
2859 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2860 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2862 (class == MODE_COMPLEX_INT
2863 ? MODE_INT : MODE_FLOAT),
2865 && (mov_optab->handlers[(int) submode].insn_code
2866 != CODE_FOR_nothing))
2868 /* Don't split destination if it is a stack push. */
2869 int stack = push_operand (x, GET_MODE (x));
2871 #ifdef PUSH_ROUNDING
/* In case we output to the stack, but the size is smaller than the
machine can push exactly, we need to use move instructions.  */
2875 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2876 != GET_MODE_SIZE (submode)))
2879 HOST_WIDE_INT offset1, offset2;
2881 /* Do not use anti_adjust_stack, since we don't want to update
2882 stack_pointer_delta. */
2883 temp = expand_binop (Pmode,
2884 #ifdef STACK_GROWS_DOWNWARD
2892 (GET_MODE_SIZE (GET_MODE (x)))),
2893 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2895 if (temp != stack_pointer_rtx)
2896 emit_move_insn (stack_pointer_rtx, temp);
2898 #ifdef STACK_GROWS_DOWNWARD
2900 offset2 = GET_MODE_SIZE (submode);
2902 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2903 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2904 + GET_MODE_SIZE (submode));
2907 emit_move_insn (change_address (x, submode,
2908 gen_rtx_PLUS (Pmode,
2910 GEN_INT (offset1))),
2911 gen_realpart (submode, y));
2912 emit_move_insn (change_address (x, submode,
2913 gen_rtx_PLUS (Pmode,
2915 GEN_INT (offset2))),
2916 gen_imagpart (submode, y));
/* If this is a stack push, push the highpart first, so it
2921 will be in the argument order.
2923 In that case, change_address is used only to convert
2924 the mode, not to change the address. */
2927 /* Note that the real part always precedes the imag part in memory
2928 regardless of machine's endianness. */
2929 #ifdef STACK_GROWS_DOWNWARD
2930 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2931 (gen_rtx_MEM (submode, XEXP (x, 0)),
2932 gen_imagpart (submode, y)));
2933 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2934 (gen_rtx_MEM (submode, XEXP (x, 0)),
2935 gen_realpart (submode, y)));
2937 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2938 (gen_rtx_MEM (submode, XEXP (x, 0)),
2939 gen_realpart (submode, y)));
2940 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2941 (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y)));
2947 rtx realpart_x, realpart_y;
2948 rtx imagpart_x, imagpart_y;
2950 /* If this is a complex value with each part being smaller than a
2951 word, the usual calling sequence will likely pack the pieces into
2952 a single register. Unfortunately, SUBREG of hard registers only
2953 deals in terms of words, so we have a problem converting input
2954 arguments to the CONCAT of two registers that is used elsewhere
2955 for complex values. If this is before reload, we can copy it into
2956 memory and reload. FIXME, we should see about using extract and
2957 insert on integer registers, but complex short and complex char
2958 variables should be rarely used. */
2959 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2960 && (reload_in_progress | reload_completed) == 0)
2963 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2965 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2967 if (packed_dest_p || packed_src_p)
2969 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2970 ? MODE_FLOAT : MODE_INT);
2972 enum machine_mode reg_mode
2973 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2975 if (reg_mode != BLKmode)
2977 rtx mem = assign_stack_temp (reg_mode,
2978 GET_MODE_SIZE (mode), 0);
2979 rtx cmem = adjust_address (mem, mode, 0);
2982 = N_("function using short complex types cannot be inline");
2986 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2988 emit_move_insn_1 (cmem, y);
2989 return emit_move_insn_1 (sreg, mem);
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2995 emit_move_insn_1 (mem, sreg);
2996 return emit_move_insn_1 (x, cmem);
3002 realpart_x = gen_realpart (submode, x);
3003 realpart_y = gen_realpart (submode, y);
3004 imagpart_x = gen_imagpart (submode, x);
3005 imagpart_y = gen_imagpart (submode, y);
3007 /* Show the output dies here. This is necessary for SUBREGs
3008 of pseudos since we cannot track their lifetimes correctly;
3009 hard regs shouldn't appear here except as return values.
3010 We never want to emit such a clobber after reload. */
3012 && ! (reload_in_progress || reload_completed)
3013 && (GET_CODE (realpart_x) == SUBREG
3014 || GET_CODE (imagpart_x) == SUBREG))
3015 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3017 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3018 (realpart_x, realpart_y));
3019 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3020 (imagpart_x, imagpart_y));
3023 return get_last_insn ();
3026 /* This will handle any multi-word mode that lacks a move_insn pattern.
3027 However, you will get better code if you define such patterns,
3028 even if they must turn into multiple assembler instructions. */
3029 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3036 #ifdef PUSH_ROUNDING
3038 /* If X is a push on the stack, do the push now and replace
3039 X with a reference to the stack pointer. */
3040 if (push_operand (x, GET_MODE (x)))
3045 /* Do not use anti_adjust_stack, since we don't want to update
3046 stack_pointer_delta. */
3047 temp = expand_binop (Pmode,
3048 #ifdef STACK_GROWS_DOWNWARD
3056 (GET_MODE_SIZE (GET_MODE (x)))),
3057 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3059 if (temp != stack_pointer_rtx)
3060 emit_move_insn (stack_pointer_rtx, temp);
3062 code = GET_CODE (XEXP (x, 0));
3064 /* Just hope that small offsets off SP are OK. */
3065 if (code == POST_INC)
3066 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3067 GEN_INT (-((HOST_WIDE_INT)
3068 GET_MODE_SIZE (GET_MODE (x)))));
3069 else if (code == POST_DEC)
3070 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3071 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3073 temp = stack_pointer_rtx;
3075 x = change_address (x, VOIDmode, temp);
3079 /* If we are in reload, see if either operand is a MEM whose address
3080 is scheduled for replacement. */
3081 if (reload_in_progress && GET_CODE (x) == MEM
3082 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3083 x = replace_equiv_address_nv (x, inner);
3084 if (reload_in_progress && GET_CODE (y) == MEM
3085 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3086 y = replace_equiv_address_nv (y, inner);
3092 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3095 rtx xpart = operand_subword (x, i, 1, mode);
3096 rtx ypart = operand_subword (y, i, 1, mode);
3098 /* If we can't get a part of Y, put Y into memory if it is a
3099 constant. Otherwise, force it into a register. If we still
3100 can't get a part of Y, abort. */
3101 if (ypart == 0 && CONSTANT_P (y))
3103 y = force_const_mem (mode, y);
3104 ypart = operand_subword (y, i, 1, mode);
3106 else if (ypart == 0)
3107 ypart = operand_subword_force (y, i, mode);
3109 if (xpart == 0 || ypart == 0)
3112 need_clobber |= (GET_CODE (xpart) == SUBREG);
3114 last_insn = emit_move_insn (xpart, ypart);
3117 seq = gen_sequence ();
3120 /* Show the output dies here. This is necessary for SUBREGs
3121 of pseudos since we cannot track their lifetimes correctly;
3122 hard regs shouldn't appear here except as return values.
3123 We never want to emit such a clobber after reload. */
3125 && ! (reload_in_progress || reload_completed)
3126 && need_clobber != 0)
3127 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
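/* For example (illustrative), a DImode move on a 32-bit target with
no movdi pattern is decomposed by the loop above into two SImode
subword moves, roughly

  (set (subreg:SI (reg:DI x) 0) (subreg:SI (reg:DI y) 0))
  (set (subreg:SI (reg:DI x) 4) (subreg:SI (reg:DI y) 4))

with the CLOBBER above emitted first so the SUBREG stores are seen as
fully defining X.  */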
3137 /* If Y is representable exactly in a narrower mode, and the target can
3138 perform the extension directly from constant or memory, then emit the
3139 move as an extension. */
3142 compress_float_constant (x, y)
3145 enum machine_mode dstmode = GET_MODE (x);
3146 enum machine_mode orig_srcmode = GET_MODE (y);
3147 enum machine_mode srcmode;
3150 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3152 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3153 srcmode != orig_srcmode;
3154 srcmode = GET_MODE_WIDER_MODE (srcmode))
3157 rtx trunc_y, last_insn;
3159 /* Skip if the target can't extend this way. */
3160 ic = can_extend_p (dstmode, srcmode, 0);
3161 if (ic == CODE_FOR_nothing)
3164 /* Skip if the narrowed value isn't exact. */
3165 if (! exact_real_truncate (srcmode, &r))
3168 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3170 if (LEGITIMATE_CONSTANT_P (trunc_y))
/* Skip if the target needs extra instructions to perform
the extension.  */
3174 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3177 else if (float_extend_from_mem[dstmode][srcmode])
3178 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3182 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3183 last_insn = get_last_insn ();
3185 if (GET_CODE (x) == REG)
3186 REG_NOTES (last_insn)
3187 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
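/* Example (illustrative): a move of the DFmode constant 1.0 can be
emitted as an SFmode constant extended through the target's
extendsfdf2 pattern, since 1.0 truncates to SFmode exactly; a constant
such as 0.1 does not survive the truncation, so it is left in
DFmode.  */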
3195 /* Pushing data onto the stack. */
3197 /* Push a block of length SIZE (perhaps variable)
3198 and return an rtx to address the beginning of the block.
3199 Note that it is not possible for the value returned to be a QUEUED.
3200 The value may be virtual_outgoing_args_rtx.
3202 EXTRA is the number of bytes of padding to push in addition to SIZE.
3203 BELOW nonzero means this padding comes at low addresses;
3204 otherwise, the padding comes at high addresses. */
3207 push_block (size, extra, below)
3213 size = convert_modes (Pmode, ptr_mode, size, 1);
3214 if (CONSTANT_P (size))
3215 anti_adjust_stack (plus_constant (size, extra));
3216 else if (GET_CODE (size) == REG && extra == 0)
3217 anti_adjust_stack (size);
3220 temp = copy_to_mode_reg (Pmode, size);
3222 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3223 temp, 0, OPTAB_LIB_WIDEN);
3224 anti_adjust_stack (temp);
3227 #ifndef STACK_GROWS_DOWNWARD
3233 temp = virtual_outgoing_args_rtx;
3234 if (extra != 0 && below)
3235 temp = plus_constant (temp, extra);
3239 if (GET_CODE (size) == CONST_INT)
3240 temp = plus_constant (virtual_outgoing_args_rtx,
3241 -INTVAL (size) - (below ? 0 : extra));
3242 else if (extra != 0 && !below)
3243 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3244 negate_rtx (Pmode, plus_constant (size, extra)));
3246 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3247 negate_rtx (Pmode, size));
3250 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
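/* Usage sketch (illustrative): push_block (GEN_INT (32), 0, 0)
reserves 32 bytes of stack and returns an address rtx for the start of
the block, typically based on virtual_outgoing_args_rtx.  */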
3253 #ifdef PUSH_ROUNDING
3255 /* Emit single push insn. */
3258 emit_single_push_insn (mode, x, type)
3260 enum machine_mode mode;
3264 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3266 enum insn_code icode;
3267 insn_operand_predicate_fn pred;
3269 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is a push pattern, use it.  Otherwise try the old way of
handing a MEM representing the push operation to the move expander.  */
3272 icode = push_optab->handlers[(int) mode].insn_code;
3273 if (icode != CODE_FOR_nothing)
3275 if (((pred = insn_data[(int) icode].operand[0].predicate)
3276 && !((*pred) (x, mode))))
3277 x = force_reg (mode, x);
3278 emit_insn (GEN_FCN (icode) (x));
3281 if (GET_MODE_SIZE (mode) == rounded_size)
3282 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3285 #ifdef STACK_GROWS_DOWNWARD
3286 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3287 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3289 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3290 GEN_INT (rounded_size));
3292 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3295 dest = gen_rtx_MEM (mode, dest_addr);
3299 set_mem_attributes (dest, type, 1);
3301 if (flag_optimize_sibling_calls)
3302 /* Function incoming arguments may overlap with sibling call
3303 outgoing arguments and we cannot allow reordering of reads
3304 from function arguments with stores to outgoing arguments
3305 of sibling calls. */
3306 set_mem_alias_set (dest, 0);
3308 emit_move_insn (dest, x);
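/* Illustrative result: with STACK_PUSH_CODE == PRE_DEC and a size
that needs no rounding, the store above targets

  (mem:SI (pre_dec (reg sp)))

while a mode whose size must be rounded goes through the PRE_MODIFY
address built above.  */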
/* Generate code to push X onto the stack, assuming it has mode MODE and
type TYPE.
MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
3316 SIZE is an rtx for the size of data to be copied (in bytes),
3317 needed only if X is BLKmode.
3319 ALIGN (in bits) is maximum alignment we can assume.
3321 If PARTIAL and REG are both nonzero, then copy that many of the first
3322 words of X into registers starting with REG, and push the rest of X.
3323 The amount of space pushed is decreased by PARTIAL words,
3324 rounded *down* to a multiple of PARM_BOUNDARY.
3325 REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all other actions for an
argument partially in registers, but do not actually load any
registers.
3330 EXTRA is the amount in bytes of extra space to leave next to this arg.
3331 This is ignored if an argument block has already been allocated.
3333 On a machine that lacks real push insns, ARGS_ADDR is the address of
3334 the bottom of the argument block for this call. We use indexing off there
to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
argument block has not been preallocated.
3338 ARGS_SO_FAR is the size of args previously pushed for this call.
3340 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3341 for arguments passed in registers. If nonzero, it will be the number
3342 of bytes required. */
3345 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3346 args_addr, args_so_far, reg_parm_stack_space,
3349 enum machine_mode mode;
3358 int reg_parm_stack_space;
3362 enum direction stack_direction
3363 #ifdef STACK_GROWS_DOWNWARD
3369 /* Decide where to pad the argument: `downward' for below,
3370 `upward' for above, or `none' for don't pad it.
3371 Default is below for small data on big-endian machines; else above. */
3372 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3374 /* Invert direction if stack is post-decrement.
3376 if (STACK_PUSH_CODE == POST_DEC)
3377 if (where_pad != none)
3378 where_pad = (where_pad == downward ? upward : downward);
3380 xinner = x = protect_from_queue (x, 0);
3382 if (mode == BLKmode)
3384 /* Copy a block into the stack, entirely or partially. */
3387 int used = partial * UNITS_PER_WORD;
3388 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3396 /* USED is now the # of bytes we need not copy to the stack
3397 because registers will take care of them. */
3400 xinner = adjust_address (xinner, BLKmode, used);
3402 /* If the partial register-part of the arg counts in its stack size,
3403 skip the part of stack space corresponding to the registers.
3404 Otherwise, start copying to the beginning of the stack space,
3405 by setting SKIP to 0. */
3406 skip = (reg_parm_stack_space == 0) ? 0 : used;
3408 #ifdef PUSH_ROUNDING
3409 /* Do it with several push insns if that doesn't take lots of insns
3410 and if there is no difficulty with push insns that skip bytes
3411 on the stack for alignment purposes. */
3414 && GET_CODE (size) == CONST_INT
3416 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3417 /* Here we avoid the case of a structure whose weak alignment
3418 forces many pushes of a small amount of data,
3419 and such small pushes do rounding that causes trouble. */
3420 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3421 || align >= BIGGEST_ALIGNMENT
3422 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3423 == (align / BITS_PER_UNIT)))
3424 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3426 /* Push padding now if padding above and stack grows down,
3427 or if padding below and stack grows up.
3428 But if space already allocated, this has already been done. */
3429 if (extra && args_addr == 0
3430 && where_pad != none && where_pad != stack_direction)
3431 anti_adjust_stack (GEN_INT (extra));
3433 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3436 #endif /* PUSH_ROUNDING */
3440 /* Otherwise make space on the stack and copy the data
3441 to the address of that space. */
3443 /* Deduct words put into registers from the size we must copy. */
3446 if (GET_CODE (size) == CONST_INT)
3447 size = GEN_INT (INTVAL (size) - used);
3449 size = expand_binop (GET_MODE (size), sub_optab, size,
3450 GEN_INT (used), NULL_RTX, 0,
3454 /* Get the address of the stack space.
3455 In this case, we do not deal with EXTRA separately.
3456 A single stack adjust will do. */
3459 temp = push_block (size, extra, where_pad == downward);
3462 else if (GET_CODE (args_so_far) == CONST_INT)
3463 temp = memory_address (BLKmode,
3464 plus_constant (args_addr,
3465 skip + INTVAL (args_so_far)));
3467 temp = memory_address (BLKmode,
3468 plus_constant (gen_rtx_PLUS (Pmode,
3472 target = gen_rtx_MEM (BLKmode, temp);
3476 set_mem_attributes (target, type, 1);
3477 /* Function incoming arguments may overlap with sibling call
3478 outgoing arguments and we cannot allow reordering of reads
3479 from function arguments with stores to outgoing arguments
3480 of sibling calls. */
3481 set_mem_alias_set (target, 0);
3484 set_mem_align (target, align);
3486 /* TEMP is the address of the block. Copy the data there. */
3487 if (GET_CODE (size) == CONST_INT
3488 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3490 move_by_pieces (target, xinner, INTVAL (size), align);
3495 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3496 enum machine_mode mode;
3498 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3500 mode = GET_MODE_WIDER_MODE (mode))
3502 enum insn_code code = movstr_optab[(int) mode];
3503 insn_operand_predicate_fn pred;
3505 if (code != CODE_FOR_nothing
3506 && ((GET_CODE (size) == CONST_INT
3507 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3508 <= (GET_MODE_MASK (mode) >> 1)))
3509 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3510 && (!(pred = insn_data[(int) code].operand[0].predicate)
3511 || ((*pred) (target, BLKmode)))
3512 && (!(pred = insn_data[(int) code].operand[1].predicate)
3513 || ((*pred) (xinner, BLKmode)))
3514 && (!(pred = insn_data[(int) code].operand[3].predicate)
3515 || ((*pred) (opalign, VOIDmode))))
3517 rtx op2 = convert_to_mode (mode, size, 1);
3518 rtx last = get_last_insn ();
3521 pred = insn_data[(int) code].operand[2].predicate;
3522 if (pred != 0 && ! (*pred) (op2, mode))
3523 op2 = copy_to_mode_reg (mode, op2);
3525 pat = GEN_FCN ((int) code) (target, xinner,
3533 delete_insns_since (last);
3538 if (!ACCUMULATE_OUTGOING_ARGS)
3540 /* If the source is referenced relative to the stack pointer,
3541 copy it to another register to stabilize it. We do not need
3542 to do this if we know that we won't be changing sp. */
3544 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3545 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3546 temp = copy_to_reg (temp);
3549 /* Make inhibit_defer_pop nonzero around the library call
3550 to force it to pop the bcopy-arguments right away. */
3552 #ifdef TARGET_MEM_FUNCTIONS
3553 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3554 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3555 convert_to_mode (TYPE_MODE (sizetype),
3556 size, TREE_UNSIGNED (sizetype)),
3557 TYPE_MODE (sizetype));
3559 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3560 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3561 convert_to_mode (TYPE_MODE (integer_type_node),
3563 TREE_UNSIGNED (integer_type_node)),
3564 TYPE_MODE (integer_type_node));
3569 else if (partial > 0)
3571 /* Scalar partly in registers. */
3573 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3576 /* # words of start of argument
3577 that we must make space for but need not store. */
3578 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3579 int args_offset = INTVAL (args_so_far);
3582 /* Push padding now if padding above and stack grows down,
3583 or if padding below and stack grows up.
3584 But if space already allocated, this has already been done. */
3585 if (extra && args_addr == 0
3586 && where_pad != none && where_pad != stack_direction)
3587 anti_adjust_stack (GEN_INT (extra));
3589 /* If we make space by pushing it, we might as well push
3590 the real data. Otherwise, we can leave OFFSET nonzero
3591 and leave the space uninitialized. */
3595 /* Now NOT_STACK gets the number of words that we don't need to
3596 allocate on the stack. */
3597 not_stack = partial - offset;
3599 /* If the partial register-part of the arg counts in its stack size,
3600 skip the part of stack space corresponding to the registers.
3601 Otherwise, start copying to the beginning of the stack space,
3602 by setting SKIP to 0. */
3603 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3605 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3606 x = validize_mem (force_const_mem (mode, x));
3608 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3609 SUBREGs of such registers are not allowed. */
3610 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3611 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3612 x = copy_to_reg (x);
3614 /* Loop over all the words allocated on the stack for this arg. */
3615 /* We can do it by words, because any scalar bigger than a word
3616 has a size a multiple of a word. */
3617 #ifndef PUSH_ARGS_REVERSED
3618 for (i = not_stack; i < size; i++)
3620 for (i = size - 1; i >= not_stack; i--)
3622 if (i >= not_stack + offset)
3623 emit_push_insn (operand_subword_force (x, i, mode),
3624 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3626 GEN_INT (args_offset + ((i - not_stack + skip)
3628 reg_parm_stack_space, alignment_pad);
3633 rtx target = NULL_RTX;
3636 /* Push padding now if padding above and stack grows down,
3637 or if padding below and stack grows up.
3638 But if space already allocated, this has already been done. */
3639 if (extra && args_addr == 0
3640 && where_pad != none && where_pad != stack_direction)
3641 anti_adjust_stack (GEN_INT (extra));
3643 #ifdef PUSH_ROUNDING
3644 if (args_addr == 0 && PUSH_ARGS)
3645 emit_single_push_insn (mode, x, type);
3649 if (GET_CODE (args_so_far) == CONST_INT)
3651 = memory_address (mode,
3652 plus_constant (args_addr,
3653 INTVAL (args_so_far)));
3655 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3658 dest = gen_rtx_MEM (mode, addr);
3661 set_mem_attributes (dest, type, 1);
3662 /* Function incoming arguments may overlap with sibling call
3663 outgoing arguments and we cannot allow reordering of reads
3664 from function arguments with stores to outgoing arguments
3665 of sibling calls. */
3666 set_mem_alias_set (dest, 0);
3669 emit_move_insn (dest, x);
3675 /* If part should go in registers, copy that part
3676 into the appropriate registers. Do this now, at the end,
3677 since mem-to-mem copies above may do function calls. */
3678 if (partial > 0 && reg != 0)
3680 /* Handle calls that pass values in multiple non-contiguous locations.
3681 The Irix 6 ABI has examples of this. */
3682 if (GET_CODE (reg) == PARALLEL)
3683 emit_group_load (reg, x, -1); /* ??? size? */
3685 move_block_to_reg (REGNO (reg), x, partial, mode);
3688 if (extra && args_addr == 0 && where_pad == stack_direction)
3689 anti_adjust_stack (GEN_INT (extra));
3691 if (alignment_pad && args_addr == 0)
3692 anti_adjust_stack (alignment_pad);
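/* Concrete case (illustrative): on a downward-growing stack with
where_pad == upward and no preallocated argument block, the EXTRA
padding bytes are pushed by anti_adjust_stack before the argument
itself; when where_pad matches the stack direction they are pushed
afterwards, just above.  */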
/* Return X if X can be used as a subtarget in a sequence of arithmetic
operations.  */
3703 /* Only registers can be subtargets. */
3704 || GET_CODE (x) != REG
3705 /* If the register is readonly, it can't be set more than once. */
3706 || RTX_UNCHANGING_P (x)
3707 /* Don't use hard regs to avoid extending their life. */
3708 || REGNO (x) < FIRST_PSEUDO_REGISTER
3709 /* Avoid subtargets inside loops,
3710 since they hide some invariant expressions. */
3711 || preserve_subexpressions_p ())
3715 /* Expand an assignment that stores the value of FROM into TO.
3716 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3717 (This may contain a QUEUED rtx;
3718 if the value is constant, this rtx is a constant.)
3719 Otherwise, the returned value is NULL_RTX.
3721 SUGGEST_REG is no longer actually used.
3722 It used to mean, copy the value through a register
3723 and return that register, if that is possible.
3724 We now use WANT_VALUE to decide whether to do this. */
3727 expand_assignment (to, from, want_value, suggest_reg)
3730 int suggest_reg ATTRIBUTE_UNUSED;
3735 /* Don't crash if the lhs of the assignment was erroneous. */
3737 if (TREE_CODE (to) == ERROR_MARK)
3739 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3740 return want_value ? result : NULL_RTX;
3743 /* Assignment of a structure component needs special treatment
3744 if the structure component's rtx is not simply a MEM.
3745 Assignment of an array element at a constant index, and assignment of
an array element in an unaligned packed structure field, has the same
problem.  */
3749 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3750 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3752 enum machine_mode mode1;
3753 HOST_WIDE_INT bitsize, bitpos;
3761 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3762 &unsignedp, &volatilep);
3764 /* If we are going to use store_bit_field and extract_bit_field,
3765 make sure to_rtx will be safe for multiple use. */
3767 if (mode1 == VOIDmode && want_value)
3768 tem = stabilize_reference (tem);
3770 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3774 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3776 if (GET_CODE (to_rtx) != MEM)
3779 #ifdef POINTERS_EXTEND_UNSIGNED
3780 if (GET_MODE (offset_rtx) != Pmode)
3781 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3783 if (GET_MODE (offset_rtx) != ptr_mode)
3784 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* A constant address in TO_RTX can have VOIDmode; we must not try
to call force_reg for that case.  Avoid that case.  */
3789 if (GET_CODE (to_rtx) == MEM
3790 && GET_MODE (to_rtx) == BLKmode
3791 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3793 && (bitpos % bitsize) == 0
3794 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3795 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3797 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3801 to_rtx = offset_address (to_rtx, offset_rtx,
3802 highest_pow2_factor_for_type (TREE_TYPE (to),
3806 if (GET_CODE (to_rtx) == MEM)
3808 tree old_expr = MEM_EXPR (to_rtx);
3810 /* If the field is at offset zero, we could have been given the
3811 DECL_RTX of the parent struct. Don't munge it. */
3812 to_rtx = shallow_copy_rtx (to_rtx);
3814 set_mem_attributes (to_rtx, to, 0);
3816 /* If we changed MEM_EXPR, that means we're now referencing
3817 the COMPONENT_REF, which means that MEM_OFFSET must be
3818 relative to that field. But we've not yet reflected BITPOS
3819 in TO_RTX. This will be done in store_field. Adjust for
3820 that by biasing MEM_OFFSET by -bitpos. */
3821 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3822 && (bitpos / BITS_PER_UNIT) != 0)
3823 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3824 - (bitpos / BITS_PER_UNIT)));
3827 /* Deal with volatile and readonly fields. The former is only done
3828 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3829 if (volatilep && GET_CODE (to_rtx) == MEM)
3831 if (to_rtx == orig_to_rtx)
3832 to_rtx = copy_rtx (to_rtx);
3833 MEM_VOLATILE_P (to_rtx) = 1;
3836 if (TREE_CODE (to) == COMPONENT_REF
3837 && TREE_READONLY (TREE_OPERAND (to, 1)))
3839 if (to_rtx == orig_to_rtx)
3840 to_rtx = copy_rtx (to_rtx);
3841 RTX_UNCHANGING_P (to_rtx) = 1;
3844 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3846 if (to_rtx == orig_to_rtx)
3847 to_rtx = copy_rtx (to_rtx);
3848 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3851 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3853 /* Spurious cast for HPUX compiler. */
3854 ? ((enum machine_mode)
3855 TYPE_MODE (TREE_TYPE (to)))
3857 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3859 preserve_temp_slots (result);
3863 /* If the value is meaningful, convert RESULT to the proper mode.
3864 Otherwise, return nothing. */
3865 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3866 TYPE_MODE (TREE_TYPE (from)),
3868 TREE_UNSIGNED (TREE_TYPE (to)))
3872 /* If the rhs is a function call and its value is not an aggregate,
3873 call the function before we start to compute the lhs.
3874 This is needed for correct code for cases such as
3875 val = setjmp (buf) on machines where reference to val
3876 requires loading up part of an address in a separate insn.
3878 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3879 since it might be a promoted variable where the zero- or sign- extension
3880 needs to be done. Handling this in the normal way is safe because no
3881 computation is done before the call. */
3882 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3883 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3884 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3885 && GET_CODE (DECL_RTL (to)) == REG))
3890 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3892 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3894 /* Handle calls that return values in multiple non-contiguous locations.
3895 The Irix 6 ABI has examples of this. */
3896 if (GET_CODE (to_rtx) == PARALLEL)
3897 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3898 else if (GET_MODE (to_rtx) == BLKmode)
3899 emit_block_move (to_rtx, value, expr_size (from));
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (POINTER_TYPE_P (TREE_TYPE (to))
3904 && GET_MODE (to_rtx) != GET_MODE (value))
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
3907 emit_move_insn (to_rtx, value);
3909 preserve_temp_slots (to_rtx);
3912 return want_value ? to_rtx : NULL_RTX;
3915 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3916 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3919 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3921 /* Don't move directly into a return register. */
3922 if (TREE_CODE (to) == RESULT_DECL
3923 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3928 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3930 if (GET_CODE (to_rtx) == PARALLEL)
3931 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3933 emit_move_insn (to_rtx, temp);
3935 preserve_temp_slots (to_rtx);
3938 return want_value ? to_rtx : NULL_RTX;
3941 /* In case we are returning the contents of an object which overlaps
3942 the place the value is being stored, use a safe function when copying
3943 a value through a pointer into a structure value return block. */
3944 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3945 && current_function_returns_struct
3946 && !current_function_returns_pcc_struct)
3951 size = expr_size (from);
3952 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3954 #ifdef TARGET_MEM_FUNCTIONS
3955 emit_library_call (memmove_libfunc, LCT_NORMAL,
3956 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3957 XEXP (from_rtx, 0), Pmode,
3958 convert_to_mode (TYPE_MODE (sizetype),
3959 size, TREE_UNSIGNED (sizetype)),
3960 TYPE_MODE (sizetype));
3962 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3963 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3964 XEXP (to_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (integer_type_node),
3966 size, TREE_UNSIGNED (integer_type_node)),
3967 TYPE_MODE (integer_type_node));
3970 preserve_temp_slots (to_rtx);
3973 return want_value ? to_rtx : NULL_RTX;
3976 /* Compute FROM and store the value in the rtx we got. */
3979 result = store_expr (from, to_rtx, want_value);
3980 preserve_temp_slots (result);
3983 return want_value ? result : NULL_RTX;
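/* Sketch of the component case handled above (illustrative): for an
assignment such as x.b = v where b is a bit-field, get_inner_reference
yields the containing object plus BITSIZE and BITPOS, and store_field
then emits the store, using store_bit_field when the field is not
byte-addressable.  */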
3986 /* Generate code for computing expression EXP,
3987 and storing the value into TARGET.
3988 TARGET may contain a QUEUED rtx.
3990 If WANT_VALUE is nonzero, return a copy of the value
3991 not in TARGET, so that we can be sure to use the proper
3992 value in a containing expression even if TARGET has something
3993 else stored in it. If possible, we copy the value through a pseudo
3994 and return that pseudo. Or, if the value is constant, we try to
3995 return the constant. In some cases, we return a pseudo
3996 copied *from* TARGET.
3998 If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode it doesn't cause a problem,
because C has no operators that could combine two different
assignments into the same BLKmode object with different values
with no sequence point.  Will other languages need this to
be more thorough?
4005 If WANT_VALUE is 0, we return NULL, to make sure
4006 to catch quickly any cases where the caller uses the value
4007 and fails to set WANT_VALUE. */
4010 store_expr (exp, target, want_value)
4016 int dont_return_target = 0;
4017 int dont_store_target = 0;
4019 if (TREE_CODE (exp) == COMPOUND_EXPR)
/* Perform first part of compound expression, then assign from second
part.  */
4023 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4025 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4027 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4029 /* For conditional expression, get safe form of the target. Then
4030 test the condition, doing the appropriate assignment on either
4031 side. This avoids the creation of unnecessary temporaries.
4032 For non-BLKmode, it is more efficient not to do this. */
4034 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4037 target = protect_from_queue (target, 1);
4039 do_pending_stack_adjust ();
4041 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp, 1), target, 0);
4044 end_cleanup_deferral ();
4046 emit_jump_insn (gen_jump (lab2));
4049 start_cleanup_deferral ();
4050 store_expr (TREE_OPERAND (exp, 2), target, 0);
4051 end_cleanup_deferral ();
4056 return want_value ? target : NULL_RTX;
4058 else if (queued_subexp_p (target))
4059 /* If target contains a postincrement, let's not risk
4060 using it as the place to generate the rhs. */
4062 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4064 /* Expand EXP into a new pseudo. */
4065 temp = gen_reg_rtx (GET_MODE (target));
4066 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4069 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4071 /* If target is volatile, ANSI requires accessing the value
4072 *from* the target, if it is accessed. So make that happen.
4073 In no case return the target itself. */
4074 if (! MEM_VOLATILE_P (target) && want_value)
4075 dont_return_target = 1;
4077 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4078 && GET_MODE (target) != BLKmode)
4079 /* If target is in memory and caller wants value in a register instead,
4080 arrange that. Pass TARGET as target for expand_expr so that,
4081 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4082 We know expand_expr will not use the target in that case.
4083 Don't do this if TARGET is volatile because we are supposed
4084 to write it and then read it. */
4086 temp = expand_expr (exp, target, GET_MODE (target), 0);
4087 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4089 /* If TEMP is already in the desired TARGET, only copy it from
4090 memory and don't store it there again. */
4092 || (rtx_equal_p (temp, target)
4093 && ! side_effects_p (temp) && ! side_effects_p (target)))
4094 dont_store_target = 1;
4095 temp = copy_to_reg (temp);
4097 dont_return_target = 1;
4099 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
/* If this is a scalar in a register that is stored in a wider mode
than the declared mode, compute the result into its declared mode
and then convert to the wider mode.  Our value is the computed
expression.  */
4105 rtx inner_target = 0;
4107 /* If we don't want a value, we can do the conversion inside EXP,
4108 which will often result in some optimizations. Do the conversion
4109 in two steps: first change the signedness, if needed, then
4110 the extend. But don't do this if the type of EXP is a subtype
4111 of something else since then the conversion might involve
4112 more than just converting modes. */
4113 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4114 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4116 if (TREE_UNSIGNED (TREE_TYPE (exp))
4117 != SUBREG_PROMOTED_UNSIGNED_P (target))
4119 ((*lang_hooks.types.signed_or_unsigned_type)
4120 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4122 exp = convert ((*lang_hooks.types.type_for_mode)
4123 (GET_MODE (SUBREG_REG (target)),
4124 SUBREG_PROMOTED_UNSIGNED_P (target)),
4127 inner_target = SUBREG_REG (target);
4130 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4132 /* If TEMP is a volatile MEM and we want a result value, make
4133 the access now so it gets done only once. Likewise if
4134 it contains TARGET. */
4135 if (GET_CODE (temp) == MEM && want_value
4136 && (MEM_VOLATILE_P (temp)
4137 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4138 temp = copy_to_reg (temp);
4140 /* If TEMP is a VOIDmode constant, use convert_modes to make
4141 sure that we properly convert it. */
4142 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4144 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4145 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4146 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4147 GET_MODE (target), temp,
4148 SUBREG_PROMOTED_UNSIGNED_P (target));
4151 convert_move (SUBREG_REG (target), temp,
4152 SUBREG_PROMOTED_UNSIGNED_P (target));
4154 /* If we promoted a constant, change the mode back down to match
4155 target. Otherwise, the caller might get confused by a result whose
4156 mode is larger than expected. */
4158 if (want_value && GET_MODE (temp) != GET_MODE (target))
4160 if (GET_MODE (temp) != VOIDmode)
4162 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4163 SUBREG_PROMOTED_VAR_P (temp) = 1;
4164 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4168 temp = convert_modes (GET_MODE (target),
4169 GET_MODE (SUBREG_REG (target)),
4170 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4173 return want_value ? temp : NULL_RTX;
4177 temp = expand_expr (exp, target, GET_MODE (target), 0);
4178 /* Return TARGET if it's a specified hardware register.
4179 If TARGET is a volatile mem ref, either return TARGET
4180 or return a reg copied *from* TARGET; ANSI requires this.
4182 Otherwise, if TEMP is not TARGET, return TEMP
4183 if it is constant (for efficiency),
4184 or if we really want the correct value. */
4185 if (!(target && GET_CODE (target) == REG
4186 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4187 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4188 && ! rtx_equal_p (temp, target)
4189 && (CONSTANT_P (temp) || want_value))
4190 dont_return_target = 1;
4193 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4194 the same as that of TARGET, adjust the constant. This is needed, for
example, in case it is a CONST_DOUBLE and we want only a word-sized
value.  */
4197 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4198 && TREE_CODE (exp) != ERROR_MARK
4199 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4200 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4201 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4203 /* If value was not generated in the target, store it there.
4204 Convert the value to TARGET's type first if necessary.
4205 If TEMP and TARGET compare equal according to rtx_equal_p, but
one or both of them are volatile memory refs, we have to distinguish
two cases:
- expand_expr has used TARGET.  In this case, we must not generate
another copy.  This can be detected by TARGET being equal according
to == .
4211 - expand_expr has not used TARGET - that means that the source just
4212 happens to have the same RTX form. Since temp will have been created
4213 by expand_expr, it will compare unequal according to == .
4214 We must generate a copy in this case, to reach the correct number
4215 of volatile memory references. */
4217 if ((! rtx_equal_p (temp, target)
4218 || (temp != target && (side_effects_p (temp)
4219 || side_effects_p (target))))
4220 && TREE_CODE (exp) != ERROR_MARK
4221 && ! dont_store_target
4222 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
but TARGET is not a valid memory reference, TEMP will differ
4224 from TARGET although it is really the same location. */
4225 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4226 || target != DECL_RTL_IF_SET (exp)))
4228 target = protect_from_queue (target, 1);
4229 if (GET_MODE (temp) != GET_MODE (target)
4230 && GET_MODE (temp) != VOIDmode)
4232 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4233 if (dont_return_target)
4235 /* In this case, we will return TEMP,
4236 so make sure it has the proper mode.
4237 But don't forget to store the value into TARGET. */
4238 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4239 emit_move_insn (target, temp);
4242 convert_move (target, temp, unsignedp);
4245 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4247 /* Handle copying a string constant into an array. The string
4248 constant may be shorter than the array. So copy just the string's
4249 actual length, and clear the rest. First get the size of the data
4250 type of the string, which is actually the size of the target. */
4251 rtx size = expr_size (exp);
4253 if (GET_CODE (size) == CONST_INT
4254 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4255 emit_block_move (target, temp, size);
4258 /* Compute the size of the data to copy from the string. */
4259 tree copy_size
4260 = size_binop (MIN_EXPR,
4261 make_tree (sizetype, size),
4262 size_int (TREE_STRING_LENGTH (exp)));
4263 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4267 /* Copy that much. */
4268 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4269 emit_block_move (target, temp, copy_size_rtx);
4271 /* Figure out how much is left in TARGET that we have to clear.
4272 Do all calculations in ptr_mode. */
4273 if (GET_CODE (copy_size_rtx) == CONST_INT)
4275 size = plus_constant (size, -INTVAL (copy_size_rtx));
4276 target = adjust_address (target, BLKmode,
4277 INTVAL (copy_size_rtx));
4281 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4282 copy_size_rtx, NULL_RTX, 0,
4285 #ifdef POINTERS_EXTEND_UNSIGNED
4286 if (GET_MODE (copy_size_rtx) != Pmode)
4287 copy_size_rtx = convert_memory_address (Pmode,
4291 target = offset_address (target, copy_size_rtx,
4292 highest_pow2_factor (copy_size));
4293 label = gen_label_rtx ();
4294 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4295 GET_MODE (size), 0, label);
4298 if (size != const0_rtx)
4299 clear_storage (target, size);
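/* An illustrative standalone sketch (not part of GCC) of the semantics
   implemented above, written with plain libc calls: initializing a char
   array from a shorter string literal copies only the string's actual
   length and zero-fills the remainder of the array.  */
#if 0
#include <string.h>

static void
init_array_from_string (char *target, size_t target_size,
                        const char *str, size_t str_len)
{
  /* Copy that much...  */
  size_t copy = str_len < target_size ? str_len : target_size;
  memcpy (target, str, copy);

  /* ... and clear whatever is left in TARGET.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif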
4305 /* Handle calls that return values in multiple non-contiguous locations.
4306 The Irix 6 ABI has examples of this. */
4307 else if (GET_CODE (target) == PARALLEL)
4308 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4309 else if (GET_MODE (temp) == BLKmode)
4310 emit_block_move (target, temp, expr_size (exp));
4312 emit_move_insn (target, temp);
4315 /* If we don't want a value, return NULL_RTX. */
4319 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4320 ??? The latter test doesn't seem to make sense. */
4321 else if (dont_return_target && GET_CODE (temp) != MEM)
4324 /* Return TARGET itself if it is a hard register. */
4325 else if (want_value && GET_MODE (target) != BLKmode
4326 && ! (GET_CODE (target) == REG
4327 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4328 return copy_to_reg (target);
4334 /* Return 1 if EXP just contains zeros. */
4342 switch (TREE_CODE (exp))
4346 case NON_LVALUE_EXPR:
4347 case VIEW_CONVERT_EXPR:
4348 return is_zeros_p (TREE_OPERAND (exp, 0));
4351 return integer_zerop (exp);
4355 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4358 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4361 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4362 elt = TREE_CHAIN (elt))
4363 if (!is_zeros_p (TREE_VALUE (elt)))
4369 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4370 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4371 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4372 if (! is_zeros_p (TREE_VALUE (elt)))
4382 /* Return 1 if EXP contains mostly (3/4) zeros. */
4385 mostly_zeros_p (exp)
4388 if (TREE_CODE (exp) == CONSTRUCTOR)
4390 int elts = 0, zeros = 0;
4391 tree elt = CONSTRUCTOR_ELTS (exp);
4392 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4394 /* If there are no ranges of true bits, it is all zero. */
4395 return elt == NULL_TREE;
4397 for (; elt; elt = TREE_CHAIN (elt))
4399 /* We do not handle the case where the index is a RANGE_EXPR,
4400 so the statistic will be somewhat inaccurate.
4401 We do make a more accurate count in store_constructor itself,
4402 so, since this function is only used for nested array elements,
4403 this should be close enough. */
4404 if (mostly_zeros_p (TREE_VALUE (elt)))
4409 return 4 * zeros >= 3 * elts;
4412 return is_zeros_p (exp);
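/* For example (illustrative numbers only): a constructor with elts == 8
   and zeros == 6 passes the test above, since 4 * 6 >= 3 * 8 reads
   24 >= 24, exactly the 75% threshold; with zeros == 5 the test reads
   20 >= 24 and fails, so the constructor does not count as mostly
   zeros.  */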
4415 /* Helper function for store_constructor.
4416 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4417 TYPE is the type of the CONSTRUCTOR, not the element type.
4418 CLEARED is as for store_constructor.
4419 ALIAS_SET is the alias set to use for any stores.
4421 This provides a recursive shortcut back to store_constructor when it isn't
4422 necessary to go through store_field. This is so that we can pass through
4423 the cleared field to let store_constructor know that we may not have to
4424 clear a substructure if the outer structure has already been cleared. */
4427 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4430 unsigned HOST_WIDE_INT bitsize;
4431 HOST_WIDE_INT bitpos;
4432 enum machine_mode mode;
4437 if (TREE_CODE (exp) == CONSTRUCTOR
4438 && bitpos % BITS_PER_UNIT == 0
4439 /* If we have a non-zero bitpos for a register target, then we just
4440 let store_field do the bitfield handling. This is unlikely to
4441 generate unnecessary clear instructions anyways. */
4442 && (bitpos == 0 || GET_CODE (target) == MEM))
4444 if (GET_CODE (target) == MEM)
4446 = adjust_address (target,
4447 GET_MODE (target) == BLKmode
4449 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4450 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4453 /* Update the alias set, if required. */
4454 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4455 && MEM_ALIAS_SET (target) != 0)
4457 target = copy_rtx (target);
4458 set_mem_alias_set (target, alias_set);
4461 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4464 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4468 /* Store the value of constructor EXP into the rtx TARGET.
4469 TARGET is either a REG or a MEM; we know it cannot conflict, since
4470 safe_from_p has been called.
4471 CLEARED is true if TARGET is known to have been zero'd.
4472 SIZE is the number of bytes of TARGET we are allowed to modify: this
4473 may not be the same as the size of EXP if we are assigning to a field
4474 which has been packed to exclude padding bits. */
4477 store_constructor (exp, target, cleared, size)
4483 tree type = TREE_TYPE (exp);
4484 #ifdef WORD_REGISTER_OPERATIONS
4485 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4488 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4489 || TREE_CODE (type) == QUAL_UNION_TYPE)
4493 /* We either clear the aggregate or indicate the value is dead. */
4494 if ((TREE_CODE (type) == UNION_TYPE
4495 || TREE_CODE (type) == QUAL_UNION_TYPE)
4497 && ! CONSTRUCTOR_ELTS (exp))
4498 /* If the constructor is empty, clear the union. */
4500 clear_storage (target, expr_size (exp));
4504 /* If we are building a static constructor into a register,
4505 set the initial value as zero so we can fold the value into
4506 a constant. But if more than one register is involved,
4507 this probably loses. */
4508 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4509 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4511 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4515 /* If the constructor has fewer fields than the structure
4516 or if we are initializing the structure to mostly zeros,
4517 clear the whole structure first. Don't do this if TARGET is a
4518 register whose mode size isn't equal to SIZE since clear_storage
4519 can't handle this case. */
4520 else if (! cleared && size > 0
4521 && ((list_length (CONSTRUCTOR_ELTS (exp))
4522 != fields_length (type))
4523 || mostly_zeros_p (exp))
4524 && (GET_CODE (target) != REG
4525 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4528 clear_storage (target, GEN_INT (size));
4533 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4535 /* Store each element of the constructor into
4536 the corresponding field of TARGET. */
4538 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4540 tree field = TREE_PURPOSE (elt);
4541 tree value = TREE_VALUE (elt);
4542 enum machine_mode mode;
4543 HOST_WIDE_INT bitsize;
4544 HOST_WIDE_INT bitpos = 0;
4547 rtx to_rtx = target;
4549 /* Just ignore missing fields.
4550 We cleared the whole structure, above,
4551 if any fields are missing. */
4555 if (cleared && is_zeros_p (value))
4558 if (host_integerp (DECL_SIZE (field), 1))
4559 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4563 unsignedp = TREE_UNSIGNED (field);
4564 mode = DECL_MODE (field);
4565 if (DECL_BIT_FIELD (field))
4568 offset = DECL_FIELD_OFFSET (field);
4569 if (host_integerp (offset, 0)
4570 && host_integerp (bit_position (field), 0))
4572 bitpos = int_bit_position (field);
4576 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4582 if (contains_placeholder_p (offset))
4583 offset = build (WITH_RECORD_EXPR, sizetype,
4584 offset, make_tree (TREE_TYPE (exp), target));
4586 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4587 if (GET_CODE (to_rtx) != MEM)
4590 #ifdef POINTERS_EXTEND_UNSIGNED
4591 if (GET_MODE (offset_rtx) != Pmode)
4592 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4594 if (GET_MODE (offset_rtx) != ptr_mode)
4595 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4598 to_rtx = offset_address (to_rtx, offset_rtx,
4599 highest_pow2_factor (offset));
4602 if (TREE_READONLY (field))
4604 if (GET_CODE (to_rtx) == MEM)
4605 to_rtx = copy_rtx (to_rtx);
4607 RTX_UNCHANGING_P (to_rtx) = 1;
4610 #ifdef WORD_REGISTER_OPERATIONS
4611 /* If this initializes a field that is smaller than a word, at the
4612 start of a word, try to widen it to a full word.
4613 This special case allows us to output C++ member function
4614 initializations in a form that the optimizers can understand. */
4615 if (GET_CODE (target) == REG
4616 && bitsize < BITS_PER_WORD
4617 && bitpos % BITS_PER_WORD == 0
4618 && GET_MODE_CLASS (mode) == MODE_INT
4619 && TREE_CODE (value) == INTEGER_CST
4621 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4623 tree type = TREE_TYPE (value);
4625 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4627 type = (*lang_hooks.types.type_for_size)
4628 (BITS_PER_WORD, TREE_UNSIGNED (type));
4629 value = convert (type, value);
4632 if (BYTES_BIG_ENDIAN)
4634 = fold (build (LSHIFT_EXPR, type, value,
4635 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4636 bitsize = BITS_PER_WORD;
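/* For instance (illustrative only): on a 32-bit big-endian word,
   widening a 16-bit integer field at bitpos 0 stores
   value << (32 - 16), so the field lands in the most significant half
   of the word, which is where big-endian layout expects it; on a
   little-endian target no shift is needed.  */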
4641 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4642 && DECL_NONADDRESSABLE_P (field))
4644 to_rtx = copy_rtx (to_rtx);
4645 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4648 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4649 value, type, cleared,
4650 get_alias_set (TREE_TYPE (field)));
4653 else if (TREE_CODE (type) == ARRAY_TYPE
4654 || TREE_CODE (type) == VECTOR_TYPE)
4659 tree domain = TYPE_DOMAIN (type);
4660 tree elttype = TREE_TYPE (type);
4662 HOST_WIDE_INT minelt = 0;
4663 HOST_WIDE_INT maxelt = 0;
4665 /* Vectors are like arrays, but the domain is stored via an array
4666 type indirectly.  */
4667 if (TREE_CODE (type) == VECTOR_TYPE)
4669 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4670 the same field as TYPE_DOMAIN, we are not guaranteed that
4671 it always will.  */
4672 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4673 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4676 const_bounds_p = (TYPE_MIN_VALUE (domain)
4677 && TYPE_MAX_VALUE (domain)
4678 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4679 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4681 /* If we have constant bounds for the range of the type, get them. */
4684 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4685 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4688 /* If the constructor has fewer elements than the array,
4689 clear the whole array first. Similarly if this is
4690 a static constructor of a non-BLKmode object.  */
4691 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4695 HOST_WIDE_INT count = 0, zero_count = 0;
4696 need_to_clear = ! const_bounds_p;
4698 /* This loop is a more accurate version of the loop in
4699 mostly_zeros_p (it handles RANGE_EXPR in an index).
4700 It is also needed to check for missing elements. */
4701 for (elt = CONSTRUCTOR_ELTS (exp);
4702 elt != NULL_TREE && ! need_to_clear;
4703 elt = TREE_CHAIN (elt))
4705 tree index = TREE_PURPOSE (elt);
4706 HOST_WIDE_INT this_node_count;
4708 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4710 tree lo_index = TREE_OPERAND (index, 0);
4711 tree hi_index = TREE_OPERAND (index, 1);
4713 if (! host_integerp (lo_index, 1)
4714 || ! host_integerp (hi_index, 1))
4720 this_node_count = (tree_low_cst (hi_index, 1)
4721 - tree_low_cst (lo_index, 1) + 1);
4724 this_node_count = 1;
4726 count += this_node_count;
4727 if (mostly_zeros_p (TREE_VALUE (elt)))
4728 zero_count += this_node_count;
4731 /* Clear the entire array first if there are any missing elements,
4732 or if the incidence of zero elements is >= 75%. */
4734 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4738 if (need_to_clear && size > 0)
4743 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4745 clear_storage (target, GEN_INT (size));
4749 else if (REG_P (target))
4750 /* Inform later passes that the old value is dead. */
4751 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4753 /* Store each element of the constructor into
4754 the corresponding element of TARGET, determined
4755 by counting the elements. */
4756 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4758 elt = TREE_CHAIN (elt), i++)
4760 enum machine_mode mode;
4761 HOST_WIDE_INT bitsize;
4762 HOST_WIDE_INT bitpos;
4764 tree value = TREE_VALUE (elt);
4765 tree index = TREE_PURPOSE (elt);
4766 rtx xtarget = target;
4768 if (cleared && is_zeros_p (value))
4771 unsignedp = TREE_UNSIGNED (elttype);
4772 mode = TYPE_MODE (elttype);
4773 if (mode == BLKmode)
4774 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4775 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4778 bitsize = GET_MODE_BITSIZE (mode);
4780 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4782 tree lo_index = TREE_OPERAND (index, 0);
4783 tree hi_index = TREE_OPERAND (index, 1);
4784 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4785 struct nesting *loop;
4786 HOST_WIDE_INT lo, hi, count;
4789 /* If the range is constant and "small", unroll the loop. */
4791 && host_integerp (lo_index, 0)
4792 && host_integerp (hi_index, 0)
4793 && (lo = tree_low_cst (lo_index, 0),
4794 hi = tree_low_cst (hi_index, 0),
4795 count = hi - lo + 1,
4796 (GET_CODE (target) != MEM
4798 || (host_integerp (TYPE_SIZE (elttype), 1)
4799 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4802 lo -= minelt; hi -= minelt;
4803 for (; lo <= hi; lo++)
4805 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4807 if (GET_CODE (target) == MEM
4808 && !MEM_KEEP_ALIAS_SET_P (target)
4809 && TREE_CODE (type) == ARRAY_TYPE
4810 && TYPE_NONALIASED_COMPONENT (type))
4812 target = copy_rtx (target);
4813 MEM_KEEP_ALIAS_SET_P (target) = 1;
4816 store_constructor_field
4817 (target, bitsize, bitpos, mode, value, type, cleared,
4818 get_alias_set (elttype));
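/* E.g. (illustrative only), with 32-bit ints the GNU C initializer

     int a[8] = { [2 ... 5] = 7 };

   takes this unrolled path: count == 4, and four constant stores are
   emitted at bit positions 64, 96, 128 and 160, with no runtime
   loop.  */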
4823 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4824 loop_top = gen_label_rtx ();
4825 loop_end = gen_label_rtx ();
4827 unsignedp = TREE_UNSIGNED (domain);
4829 index = build_decl (VAR_DECL, NULL_TREE, domain);
4832 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4834 SET_DECL_RTL (index, index_r);
4835 if (TREE_CODE (value) == SAVE_EXPR
4836 && SAVE_EXPR_RTL (value) == 0)
4838 /* Make sure value gets expanded once before the
4839 loop.  */
4840 expand_expr (value, const0_rtx, VOIDmode, 0);
4843 store_expr (lo_index, index_r, 0);
4844 loop = expand_start_loop (0);
4846 /* Assign value to element index. */
4848 = convert (ssizetype,
4849 fold (build (MINUS_EXPR, TREE_TYPE (index),
4850 index, TYPE_MIN_VALUE (domain))));
4851 position = size_binop (MULT_EXPR, position,
4853 TYPE_SIZE_UNIT (elttype)));
4855 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4856 xtarget = offset_address (target, pos_rtx,
4857 highest_pow2_factor (position));
4858 xtarget = adjust_address (xtarget, mode, 0);
4859 if (TREE_CODE (value) == CONSTRUCTOR)
4860 store_constructor (value, xtarget, cleared,
4861 bitsize / BITS_PER_UNIT);
4863 store_expr (value, xtarget, 0);
4865 expand_exit_loop_if_false (loop,
4866 build (LT_EXPR, integer_type_node,
4869 expand_increment (build (PREINCREMENT_EXPR,
4871 index, integer_one_node), 0, 0);
4873 emit_label (loop_end);
4876 else if ((index != 0 && ! host_integerp (index, 0))
4877 || ! host_integerp (TYPE_SIZE (elttype), 1))
4882 index = ssize_int (1);
4885 index = convert (ssizetype,
4886 fold (build (MINUS_EXPR, index,
4887 TYPE_MIN_VALUE (domain))));
4889 position = size_binop (MULT_EXPR, index,
4891 TYPE_SIZE_UNIT (elttype)));
4892 xtarget = offset_address (target,
4893 expand_expr (position, 0, VOIDmode, 0),
4894 highest_pow2_factor (position));
4895 xtarget = adjust_address (xtarget, mode, 0);
4896 store_expr (value, xtarget, 0);
4901 bitpos = ((tree_low_cst (index, 0) - minelt)
4902 * tree_low_cst (TYPE_SIZE (elttype), 1));
4904 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4906 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4907 && TREE_CODE (type) == ARRAY_TYPE
4908 && TYPE_NONALIASED_COMPONENT (type))
4910 target = copy_rtx (target);
4911 MEM_KEEP_ALIAS_SET_P (target) = 1;
4914 store_constructor_field (target, bitsize, bitpos, mode, value,
4915 type, cleared, get_alias_set (elttype));
4921 /* Set constructor assignments. */
4922 else if (TREE_CODE (type) == SET_TYPE)
4924 tree elt = CONSTRUCTOR_ELTS (exp);
4925 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4926 tree domain = TYPE_DOMAIN (type);
4927 tree domain_min, domain_max, bitlength;
4929 /* The default implementation strategy is to extract the constant
4930 parts of the constructor, use that to initialize the target,
4931 and then "or" in whatever non-constant ranges we need in addition.
4933 If a large set is all zero or all ones, it is
4934 probably better to set it using memset (if available) or bzero.
4935 Also, if a large set has just a single range, it may also be
4936 better to first clear the set (using bzero/memset) and then set
4937 the bits we want.  */
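/* A worked instance (illustrative only): for a set over [0..63] built
   from the constant members {1, 2, 3, 60} on a 32-bit-word target, the
   two constant words are computed and stored directly; the "or" path
   is only needed for members whose positions are not known at compile
   time.  */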
4939 /* Check for all zeros. */
4940 if (elt == NULL_TREE && size > 0)
4943 clear_storage (target, GEN_INT (size));
4947 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4948 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4949 bitlength = size_binop (PLUS_EXPR,
4950 size_diffop (domain_max, domain_min),
4953 nbits = tree_low_cst (bitlength, 1);
4955 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4956 are "complicated" (more than one range), initialize (the
4957 constant parts) by copying from a constant. */
4958 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4959 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4961 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4962 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4963 char *bit_buffer = (char *) alloca (nbits);
4964 HOST_WIDE_INT word = 0;
4965 unsigned int bit_pos = 0;
4966 unsigned int ibit = 0;
4967 unsigned int offset = 0; /* In bytes from beginning of set. */
4969 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4972 if (bit_buffer[ibit])
4974 if (BYTES_BIG_ENDIAN)
4975 word |= (1 << (set_word_size - 1 - bit_pos));
4977 word |= 1 << bit_pos;
4981 if (bit_pos >= set_word_size || ibit == nbits)
4983 if (word != 0 || ! cleared)
4985 rtx datum = GEN_INT (word);
4988 /* The assumption here is that it is safe to use
4989 XEXP if the set is multi-word, but not if
4990 it's single-word. */
4991 if (GET_CODE (target) == MEM)
4992 to_rtx = adjust_address (target, mode, offset);
4993 else if (offset == 0)
4997 emit_move_insn (to_rtx, datum);
5004 offset += set_word_size / BITS_PER_UNIT;
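/* A minimal standalone sketch (not part of GCC) of the word packing
   done above, assuming little-endian bit numbering within words of
   word_size bits, with word_size no larger than the width of unsigned
   long; the big-endian branch above mirrors the bit index within each
   word instead.  */
#if 0
#include <stddef.h>

static void
pack_bits (const char *bit_buffer, size_t nbits,
           unsigned long *words, unsigned int word_size)
{
  size_t ibit;

  /* Set bit IBIT of the output: word IBIT / WORD_SIZE, position
     IBIT % WORD_SIZE within that word.  */
  for (ibit = 0; ibit < nbits; ibit++)
    if (bit_buffer[ibit])
      words[ibit / word_size] |= 1UL << (ibit % word_size);
}
#endif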
5009 /* Don't bother clearing storage if the set is all ones. */
5010 if (TREE_CHAIN (elt) != NULL_TREE
5011 || (TREE_PURPOSE (elt) == NULL_TREE
5013 : ( ! host_integerp (TREE_VALUE (elt), 0)
5014 || ! host_integerp (TREE_PURPOSE (elt), 0)
5015 || (tree_low_cst (TREE_VALUE (elt), 0)
5016 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5017 != (HOST_WIDE_INT) nbits))))
5018 clear_storage (target, expr_size (exp));
5020 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5022 /* Start of range of element or NULL. */
5023 tree startbit = TREE_PURPOSE (elt);
5024 /* End of range of element, or element value. */
5025 tree endbit = TREE_VALUE (elt);
5026 #ifdef TARGET_MEM_FUNCTIONS
5027 HOST_WIDE_INT startb, endb;
5029 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5031 bitlength_rtx = expand_expr (bitlength,
5032 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5034 /* Handle non-range tuple element like [ expr ]. */
5035 if (startbit == NULL_TREE)
5037 startbit = save_expr (endbit);
5041 startbit = convert (sizetype, startbit);
5042 endbit = convert (sizetype, endbit);
5043 if (! integer_zerop (domain_min))
5045 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5046 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5048 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5049 EXPAND_CONST_ADDRESS);
5050 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5051 EXPAND_CONST_ADDRESS);
5057 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5058 (GET_MODE (target), 0),
5061 emit_move_insn (targetx, target);
5064 else if (GET_CODE (target) == MEM)
5069 #ifdef TARGET_MEM_FUNCTIONS
5070 /* Optimization: If startbit and endbit are
5071 constants divisible by BITS_PER_UNIT,
5072 call memset instead. */
5073 if (TREE_CODE (startbit) == INTEGER_CST
5074 && TREE_CODE (endbit) == INTEGER_CST
5075 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5076 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5078 emit_library_call (memset_libfunc, LCT_NORMAL,
5080 plus_constant (XEXP (targetx, 0),
5081 startb / BITS_PER_UNIT),
5083 constm1_rtx, TYPE_MODE (integer_type_node),
5084 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5085 TYPE_MODE (sizetype));
5089 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5090 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5091 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5092 startbit_rtx, TYPE_MODE (sizetype),
5093 endbit_rtx, TYPE_MODE (sizetype));
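/* Worked numbers (illustrative only): with BITS_PER_UNIT == 8, a
   constant range covering bits 8 through 23 gives startb == 8 and
   endb == 24, both divisible by 8, so the memset path above writes
   (24 - 8) / 8 == 2 bytes of 0xff at byte offset 1; a range such as
   [3..23] would fall through to the __setbits call instead.  */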
5096 emit_move_insn (target, targetx);
5104 /* Store the value of EXP (an expression tree)
5105 into a subfield of TARGET which has mode MODE and occupies
5106 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5107 If MODE is VOIDmode, it means that we are storing into a bit-field.
5109 If VALUE_MODE is VOIDmode, return nothing in particular.
5110 UNSIGNEDP is not used in this case.
5112 Otherwise, return an rtx for the value stored. This rtx
5113 has mode VALUE_MODE if that is convenient to do.
5114 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5116 TYPE is the type of the underlying object,
5118 ALIAS_SET is the alias set for the destination. This value will
5119 (in general) be different from that for TARGET, since TARGET is a
5120 reference to the containing structure. */
5123 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5126 HOST_WIDE_INT bitsize;
5127 HOST_WIDE_INT bitpos;
5128 enum machine_mode mode;
5130 enum machine_mode value_mode;
5135 HOST_WIDE_INT width_mask = 0;
5137 if (TREE_CODE (exp) == ERROR_MARK)
5140 /* If we have nothing to store, do nothing unless the expression has
5143 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5144 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5145 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5147 /* If we are storing into an unaligned field of an aligned union that is
5148 in a register, we may have the mode of TARGET being an integer mode but
5149 MODE == BLKmode. In that case, get an aligned object whose size and
5150 alignment are the same as TARGET and store TARGET into it (we can avoid
5151 the store if the field being stored is the entire width of TARGET). Then
5152 call ourselves recursively to store the field into a BLKmode version of
5153 that object. Finally, load from the object into TARGET. This is not
5154 very efficient in general, but should only be slightly more expensive
5155 than the otherwise-required unaligned accesses. Perhaps this can be
5156 cleaned up later. */
5159 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5163 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5165 rtx blk_object = adjust_address (object, BLKmode, 0);
5167 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5168 emit_move_insn (object, target);
5170 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5173 emit_move_insn (target, object);
5175 /* We want to return the BLKmode version of the data. */
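/* Concretely (illustrative only): storing a 13-bit field into a union
   held in a 32-bit register first copies the register into a
   word-sized stack temporary, recurses to perform the bitfield store
   on the temporary's BLKmode view, and then loads the temporary back
   into the register.  */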
5179 if (GET_CODE (target) == CONCAT)
5181 /* We're storing into a struct containing a single __complex. */
5185 return store_expr (exp, target, 0);
5188 /* If the structure is in a register or if the component
5189 is a bit field, we cannot use addressing to access it.
5190 Use bit-field techniques or SUBREG to store in it. */
5192 if (mode == VOIDmode
5193 || (mode != BLKmode && ! direct_store[(int) mode]
5194 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5195 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5196 || GET_CODE (target) == REG
5197 || GET_CODE (target) == SUBREG
5198 /* If the field isn't aligned enough to store as an ordinary memref,
5199 store it as a bit field. */
5200 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5201 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5202 || bitpos % GET_MODE_ALIGNMENT (mode)))
5203 /* If the RHS and field are a constant size and the size of the
5204 RHS isn't the same size as the bitfield, we must use bitfield
5207 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5208 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5210 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5212 /* If BITSIZE is narrower than the size of the type of EXP
5213 we will be narrowing TEMP.  Normally, what's wanted are the
5214 low-order bits.  However, if EXP's type is a record and this is
5215 a big-endian machine, we want the upper BITSIZE bits.  */
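/* For example (illustrative only): narrowing a 32-bit record value to
   bitsize == 12 on a big-endian target shifts TEMP right by
   32 - 12 == 20, so the upper twelve bits (the ones that come first in
   memory order) become the low-order bits used by the bitfield store
   below.  */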
5216 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5217 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5218 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5219 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5220 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5224 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5225 MODE.  */
5226 if (mode != VOIDmode && mode != BLKmode
5227 && mode != TYPE_MODE (TREE_TYPE (exp)))
5228 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5230 /* If the modes of TARGET and TEMP are both BLKmode, both
5231 must be in memory and BITPOS must be aligned on a byte
5232 boundary. If so, we simply do a block copy. */
5233 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5235 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5236 || bitpos % BITS_PER_UNIT != 0)
5239 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5240 emit_block_move (target, temp,
5241 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5244 return value_mode == VOIDmode ? const0_rtx : target;
5247 /* Store the value in the bitfield. */
5248 store_bit_field (target, bitsize, bitpos, mode, temp,
5249 int_size_in_bytes (type));
5251 if (value_mode != VOIDmode)
5253 /* The caller wants an rtx for the value.
5254 If possible, avoid refetching from the bitfield itself. */
5256 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5259 enum machine_mode tmode;
5261 tmode = GET_MODE (temp);
5262 if (tmode == VOIDmode)
5266 return expand_and (tmode, temp,
5267 gen_int_mode (width_mask, tmode),
5270 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5271 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5272 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
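/* Worked instance (illustrative only): returning a signed 5-bit field
   in a 32-bit TMODE uses count == 32 - 5 == 27, and
   (temp << 27) >> 27 arithmetically sign-extends bit 4 through the
   upper bits; the unsigned case above instead masks with
   width_mask == 0x1f.  */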
5275 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5276 NULL_RTX, value_mode, VOIDmode,
5277 int_size_in_bytes (type));
5283 rtx addr = XEXP (target, 0);
5284 rtx to_rtx = target;
5286 /* If a value is wanted, it must be the lhs;
5287 so make the address stable for multiple use. */
5289 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5290 && ! CONSTANT_ADDRESS_P (addr)
5291 /* A frame-pointer reference is already stable. */
5292 && ! (GET_CODE (addr) == PLUS
5293 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5294 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5295 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5296 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5298 /* Now build a reference to just the desired component. */
5300 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5302 if (to_rtx == target)
5303 to_rtx = copy_rtx (to_rtx);
5305 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5306 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5307 set_mem_alias_set (to_rtx, alias_set);
5309 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5313 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5314 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5315 codes and find the ultimate containing object, which we return.
5317 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5318 bit position, and *PUNSIGNEDP to the signedness of the field.
5319 If the position of the field is variable, we store a tree
5320 giving the variable offset (in units) in *POFFSET.
5321 This offset is in addition to the bit position.
5322 If the position is not variable, we store 0 in *POFFSET.
5324 If any of the extraction expressions is volatile,
5325 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5327 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5328 is a mode that can be used to access the field.  In that case, *PBITSIZE
5329 is redundant.
5331 If the field describes a variable-sized object, *PMODE is set to
5332 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5333 this case, but the address of the object can be found. */
5336 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5337 punsignedp, pvolatilep)
5339 HOST_WIDE_INT *pbitsize;
5340 HOST_WIDE_INT *pbitpos;
5342 enum machine_mode *pmode;
5347 enum machine_mode mode = VOIDmode;
5348 tree offset = size_zero_node;
5349 tree bit_offset = bitsize_zero_node;
5350 tree placeholder_ptr = 0;
5353 /* First get the mode, signedness, and size. We do this from just the
5354 outermost expression. */
5355 if (TREE_CODE (exp) == COMPONENT_REF)
5357 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5358 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5359 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5361 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5363 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5365 size_tree = TREE_OPERAND (exp, 1);
5366 *punsignedp = TREE_UNSIGNED (exp);
5370 mode = TYPE_MODE (TREE_TYPE (exp));
5371 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5373 if (mode == BLKmode)
5374 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5376 *pbitsize = GET_MODE_BITSIZE (mode);
5381 if (! host_integerp (size_tree, 1))
5382 mode = BLKmode, *pbitsize = -1;
5384 *pbitsize = tree_low_cst (size_tree, 1);
5387 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5388 and find the ultimate containing object. */
5391 if (TREE_CODE (exp) == BIT_FIELD_REF)
5392 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5393 else if (TREE_CODE (exp) == COMPONENT_REF)
5395 tree field = TREE_OPERAND (exp, 1);
5396 tree this_offset = DECL_FIELD_OFFSET (field);
5398 /* If this field hasn't been filled in yet, don't go
5399 past it. This should only happen when folding expressions
5400 made during type construction. */
5401 if (this_offset == 0)
5403 else if (! TREE_CONSTANT (this_offset)
5404 && contains_placeholder_p (this_offset))
5405 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5407 offset = size_binop (PLUS_EXPR, offset, this_offset);
5408 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5409 DECL_FIELD_BIT_OFFSET (field));
5411 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5414 else if (TREE_CODE (exp) == ARRAY_REF
5415 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5417 tree index = TREE_OPERAND (exp, 1);
5418 tree array = TREE_OPERAND (exp, 0);
5419 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5420 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5421 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5423 /* We assume all arrays have sizes that are a multiple of a byte.
5424 First subtract the lower bound, if any, in the type of the
5425 index, then convert to sizetype and multiply by the size of the
5426 array element.  */
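/* Worked example (illustrative only): for "int a[10]; ... a[i]" with
   low_bound == 0 and a 4-byte element, this accumulates
   offset += (sizetype) i * 4; a compile-time constant index is instead
   folded into the constant bit position at the end of this
   function.  */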
5427 if (low_bound != 0 && ! integer_zerop (low_bound))
5428 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5431 /* If the index has a self-referential type, pass it to a
5432 WITH_RECORD_EXPR; if the component size does, pass our
5433 component to one. */
5434 if (! TREE_CONSTANT (index)
5435 && contains_placeholder_p (index))
5436 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5437 if (! TREE_CONSTANT (unit_size)
5438 && contains_placeholder_p (unit_size))
5439 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5441 offset = size_binop (PLUS_EXPR, offset,
5442 size_binop (MULT_EXPR,
5443 convert (sizetype, index),
5447 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5449 tree new = find_placeholder (exp, &placeholder_ptr);
5451 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5452 We might have been called from tree optimization where we
5453 haven't set up an object yet. */
5461 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5462 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5463 && ! ((TREE_CODE (exp) == NOP_EXPR
5464 || TREE_CODE (exp) == CONVERT_EXPR)
5465 && (TYPE_MODE (TREE_TYPE (exp))
5466 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5469 /* If any reference in the chain is volatile, the effect is volatile. */
5470 if (TREE_THIS_VOLATILE (exp))
5473 exp = TREE_OPERAND (exp, 0);
5476 /* If OFFSET is constant, see if we can return the whole thing as a
5477 constant bit position. Otherwise, split it up. */
5478 if (host_integerp (offset, 0)
5479 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5481 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5482 && host_integerp (tem, 0))
5483 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5485 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
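/* E.g. (illustrative only): with BITS_PER_UNIT == 8, a constant byte
   offset of 2 plus a bit_offset of 3 folds into *PBITPOS == 19 with
   *POFFSET == 0; if OFFSET had not been constant, the split form with
   *PBITPOS == 3 and *POFFSET set to the variable byte offset is used
   instead.  */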
5491 /* Return 1 if T is an expression that get_inner_reference handles. */
5494 handled_component_p (t)
5497 switch (TREE_CODE (t))
5502 case ARRAY_RANGE_REF:
5503 case NON_LVALUE_EXPR:
5504 case VIEW_CONVERT_EXPR:
5509 return (TYPE_MODE (TREE_TYPE (t))
5510 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5517 /* Given an rtx VALUE that may contain additions and multiplications, return
5518 an equivalent value that just refers to a register, memory, or constant.
5519 This is done by generating instructions to perform the arithmetic and
5520 returning a pseudo-register containing the value.
5522 The returned value may be a REG, SUBREG, MEM or constant. */
5525 force_operand (value, target)
5529 /* Use subtarget as the target for operand 0 of a binary operation. */
5530 rtx subtarget = get_subtarget (target);
5531 enum rtx_code code = GET_CODE (value);
5533 /* Check for a PIC address load. */
5534 if ((code == PLUS || code == MINUS)
5535 && XEXP (value, 0) == pic_offset_table_rtx
5536 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5537 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5538 || GET_CODE (XEXP (value, 1)) == CONST))
5541 subtarget = gen_reg_rtx (GET_MODE (value));
5542 emit_move_insn (subtarget, value);
5546 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5549 target = gen_reg_rtx (GET_MODE (value));
5550 convert_move (target, force_operand (XEXP (value, 0), NULL),
5551 code == ZERO_EXTEND);
5555 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5557 op2 = XEXP (value, 1);
5558 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5560 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5563 op2 = negate_rtx (GET_MODE (value), op2);
5566 /* Check for an addition with OP2 a constant integer and our first
5567 operand a PLUS of a virtual register and something else. In that
5568 case, we want to emit the sum of the virtual register and the
5569 constant first and then add the other value. This allows virtual
5570 register instantiation to simply modify the constant rather than
5571 creating another one around this addition. */
5572 if (code == PLUS && GET_CODE (op2) == CONST_INT
5573 && GET_CODE (XEXP (value, 0)) == PLUS
5574 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5575 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5576 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5578 rtx temp = expand_simple_binop (GET_MODE (value), code,
5579 XEXP (XEXP (value, 0), 0), op2,
5580 subtarget, 0, OPTAB_LIB_WIDEN);
5581 return expand_simple_binop (GET_MODE (value), code, temp,
5582 force_operand (XEXP (XEXP (value,
5584 target, 0, OPTAB_LIB_WIDEN);
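/* For instance (illustrative only): given
     (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 8))
   the code above first emits virtual-stack-vars + 8, which virtual
   register instantiation can later rewrite as a single frame-pointer
   offset, and only then adds (reg 117).  */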
5587 op1 = force_operand (XEXP (value, 0), subtarget);
5588 op2 = force_operand (op2, NULL_RTX);
5592 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5594 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5595 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5596 target, 1, OPTAB_LIB_WIDEN);
5598 return expand_divmod (0,
5599 FLOAT_MODE_P (GET_MODE (value))
5600 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5601 GET_MODE (value), op1, op2, target, 0);
5604 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5608 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5612 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5616 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5617 target, 0, OPTAB_LIB_WIDEN);
5620 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5621 target, 1, OPTAB_LIB_WIDEN);
5624 if (GET_RTX_CLASS (code) == '1')
5626 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5627 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5630 #ifdef INSN_SCHEDULING
5631 /* On machines that have insn scheduling, we want all memory references to be
5632 explicit, so we need to deal with such paradoxical SUBREGs. */
5633 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5634 && (GET_MODE_SIZE (GET_MODE (value))
5635 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5637 = simplify_gen_subreg (GET_MODE (value),
5638 force_reg (GET_MODE (SUBREG_REG (value)),
5639 force_operand (SUBREG_REG (value),
5641 GET_MODE (SUBREG_REG (value)),
5642 SUBREG_BYTE (value));
5648 /* Subroutine of expand_expr: return nonzero iff there is no way that
5649 EXP can reference X, which is being modified. TOP_P is nonzero if this
5650 call is going to be used to determine whether we need a temporary
5651 for EXP, as opposed to a recursive call to this function.
5653 It is always safe for this routine to return zero since it merely
5654 searches for optimization opportunities. */
5657 safe_from_p (x, exp, top_p)
5664 static tree save_expr_list;
5667 /* If EXP has varying size, we MUST use a target since we currently
5668 have no way of allocating temporaries of variable size
5669 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5670 So we assume here that something at a higher level has prevented a
5671 clash. This is somewhat bogus, but the best we can do. Only
5672 do this when X is BLKmode and when we are at the top level. */
5673 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5674 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5675 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5676 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5677 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5679 && GET_MODE (x) == BLKmode)
5680 /* If X is in the outgoing argument area, it is always safe. */
5681 || (GET_CODE (x) == MEM
5682 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5683 || (GET_CODE (XEXP (x, 0)) == PLUS
5684 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5687 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5688 find the underlying pseudo. */
5689 if (GET_CODE (x) == SUBREG)
5692 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5696 /* A SAVE_EXPR might appear many times in the expression passed to the
5697 top-level safe_from_p call, and if it has a complex subexpression,
5698 examining it multiple times could result in a combinatorial explosion.
5699 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5700 with optimization took about 28 minutes to compile -- even though it was
5701 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5702 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5703 we have processed. Note that the only test of top_p was above. */
5712 rtn = safe_from_p (x, exp, 0);
5714 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5715 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5720 /* Now look at our tree code and possibly recurse. */
5721 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5724 exp_rtl = DECL_RTL_IF_SET (exp);
5731 if (TREE_CODE (exp) == TREE_LIST)
5732 return ((TREE_VALUE (exp) == 0
5733 || safe_from_p (x, TREE_VALUE (exp), 0))
5734 && (TREE_CHAIN (exp) == 0
5735 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5736 else if (TREE_CODE (exp) == ERROR_MARK)
5737 return 1; /* An already-visited SAVE_EXPR? */
5742 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5746 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5747 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5751 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5752 the expression. If it is set, we conflict iff we are that rtx or
5753 both are in memory. Otherwise, we check all operands of the
5754 expression recursively. */
5756 switch (TREE_CODE (exp))
5759 /* If the operand is static or we are static, we can't conflict.
5760 Likewise if we don't conflict with the operand at all. */
5761 if (staticp (TREE_OPERAND (exp, 0))
5762 || TREE_STATIC (exp)
5763 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5766 /* Otherwise, the only way this can conflict is if we are taking
5767 the address of a DECL whose address is part of X, which is
5768 very rare.  */
5769 exp = TREE_OPERAND (exp, 0);
5772 if (!DECL_RTL_SET_P (exp)
5773 || GET_CODE (DECL_RTL (exp)) != MEM)
5776 exp_rtl = XEXP (DECL_RTL (exp), 0);
5781 if (GET_CODE (x) == MEM
5782 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5783 get_alias_set (exp)))
5788 /* Assume that the call will clobber all hard registers and
5789 all of memory.  */
5790 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5791 || GET_CODE (x) == MEM)
5796 /* If a sequence exists, we would have to scan every instruction
5797 in the sequence to see if it was safe.  This is probably not
5798 worthwhile.  */
5799 if (RTL_EXPR_SEQUENCE (exp))
5802 exp_rtl = RTL_EXPR_RTL (exp);
5805 case WITH_CLEANUP_EXPR:
5806 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5809 case CLEANUP_POINT_EXPR:
5810 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5813 exp_rtl = SAVE_EXPR_RTL (exp);
5817 /* If we've already scanned this, don't do it again. Otherwise,
5818 show we've scanned it and record for clearing the flag if we're
5819 going on.  */
5820 if (TREE_PRIVATE (exp))
5823 TREE_PRIVATE (exp) = 1;
5824 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5826 TREE_PRIVATE (exp) = 0;
5830 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5834 /* The only operand we look at is operand 1. The rest aren't
5835 part of the expression. */
5836 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5838 case METHOD_CALL_EXPR:
5839 /* This takes an rtx argument, but shouldn't appear here. */
5846 /* If we have an rtx, we do not need to scan our operands. */
5850 nops = first_rtl_op (TREE_CODE (exp));
5851 for (i = 0; i < nops; i++)
5852 if (TREE_OPERAND (exp, i) != 0
5853 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5856 /* If this is a language-specific tree code, it may require
5857 special handling. */
5858 if ((unsigned int) TREE_CODE (exp)
5859 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5860 && !(*lang_hooks.safe_from_p) (x, exp))
5864 /* If we have an rtl, find any enclosed object.  Then see if we conflict
5865 with it.  */
5868 if (GET_CODE (exp_rtl) == SUBREG)
5870 exp_rtl = SUBREG_REG (exp_rtl);
5871 if (GET_CODE (exp_rtl) == REG
5872 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5876 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5877 are memory and they conflict. */
5878 return ! (rtx_equal_p (x, exp_rtl)
5879 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5880 && true_dependence (exp_rtl, VOIDmode, x,
5881 rtx_addr_varies_p)));
5884 /* If we reach here, it is safe. */
5888 /* Subroutine of expand_expr: return rtx if EXP is a
5889 variable or parameter; else return 0. */
5896 switch (TREE_CODE (exp))
5900 return DECL_RTL (exp);
5906 #ifdef MAX_INTEGER_COMPUTATION_MODE
5909 check_max_integer_computation_mode (exp)
5912 enum tree_code code;
5913 enum machine_mode mode;
5915 /* Strip any NOPs that don't change the mode.  */
5916 STRIP_NOPS (exp);
5917 code = TREE_CODE (exp);
5919 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5920 if (code == NOP_EXPR
5921 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5924 /* First check the type of the overall operation. We need only look at
5925 unary, binary and relational operations. */
5926 if (TREE_CODE_CLASS (code) == '1'
5927 || TREE_CODE_CLASS (code) == '2'
5928 || TREE_CODE_CLASS (code) == '<')
5930 mode = TYPE_MODE (TREE_TYPE (exp));
5931 if (GET_MODE_CLASS (mode) == MODE_INT
5932 && mode > MAX_INTEGER_COMPUTATION_MODE)
5933 internal_error ("unsupported wide integer operation");
5936 /* Check operand of a unary op. */
5937 if (TREE_CODE_CLASS (code) == '1')
5939 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5940 if (GET_MODE_CLASS (mode) == MODE_INT
5941 && mode > MAX_INTEGER_COMPUTATION_MODE)
5942 internal_error ("unsupported wide integer operation");
5945 /* Check operands of a binary/comparison op. */
5946 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5948 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5949 if (GET_MODE_CLASS (mode) == MODE_INT
5950 && mode > MAX_INTEGER_COMPUTATION_MODE)
5951 internal_error ("unsupported wide integer operation");
5953 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5954 if (GET_MODE_CLASS (mode) == MODE_INT
5955 && mode > MAX_INTEGER_COMPUTATION_MODE)
5956 internal_error ("unsupported wide integer operation");
5961 /* Return the highest power of two that EXP is known to be a multiple of.
5962 This is used in updating alignment of MEMs in array references. */
5964 static HOST_WIDE_INT
5965 highest_pow2_factor (exp)
5968 HOST_WIDE_INT c0, c1;
5970 switch (TREE_CODE (exp))
5973 /* We can find the lowest bit that's a one. If the low
5974 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5975 We need to handle this case since we can find it in a COND_EXPR,
5976 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5977 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5978 later ICE.  */
5979 if (TREE_CONSTANT_OVERFLOW (exp))
5980 return BIGGEST_ALIGNMENT;
5983 /* Note: tree_low_cst is intentionally not used here,
5984 we don't care about the upper bits. */
5985 c0 = TREE_INT_CST_LOW (exp);
5986 c0 &= -c0;
5987 return c0 ? c0 : BIGGEST_ALIGNMENT;
5991 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5992 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5993 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5994 return MIN (c0, c1);
5996 case MULT_EXPR:
5997 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5998 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5999 return c0 * c1;
6001 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6003 if (integer_pow2p (TREE_OPERAND (exp, 1))
6004 && host_integerp (TREE_OPERAND (exp, 1), 1))
6006 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6007 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6008 return MAX (1, c0 / c1);
6012 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6013 case SAVE_EXPR: case WITH_RECORD_EXPR:
6014 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6017 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6022 return MIN (c0, c1);
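/* Worked example (illustrative only): for the index expression
   i * 12 + 8 this returns MIN (1 * 4, 8) == 4, since an unknown i
   contributes a factor of 1, the constant 12 contributes its lowest
   set bit 4 via the MULT case, and the constant 8 contributes 8.  */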
6031 /* Similar, except that it is known that the expression must be a multiple
6032 of the alignment of TYPE. */
6034 static HOST_WIDE_INT
6035 highest_pow2_factor_for_type (type, exp)
6039 HOST_WIDE_INT type_align, factor;
6041 factor = highest_pow2_factor (exp);
6042 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6043 return MAX (factor, type_align);
6046 /* Return an object on the placeholder list that matches EXP, a
6047 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6048 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6049 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6050 is a location which initially points to a starting location in the
6051 placeholder list (zero means start of the list) and where a pointer into
6052 the placeholder list at which the object is found is placed. */
6055 find_placeholder (exp, plist)
6059 tree type = TREE_TYPE (exp);
6060 tree placeholder_expr;
6062 for (placeholder_expr
6063 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6064 placeholder_expr != 0;
6065 placeholder_expr = TREE_CHAIN (placeholder_expr))
6067 tree need_type = TYPE_MAIN_VARIANT (type);
6070 /* Find the outermost reference that is of the type we want. If none,
6071 see if any object has a type that is a pointer to the type we
6072 want.  */
6073 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6074 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6075 || TREE_CODE (elt) == COND_EXPR)
6076 ? TREE_OPERAND (elt, 1)
6077 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6081 ? TREE_OPERAND (elt, 0) : 0))
6082 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6085 *plist = placeholder_expr;
6089 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6091 = ((TREE_CODE (elt) == COMPOUND_EXPR
6092 || TREE_CODE (elt) == COND_EXPR)
6093 ? TREE_OPERAND (elt, 1)
6094 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6098 ? TREE_OPERAND (elt, 0) : 0))
6099 if (POINTER_TYPE_P (TREE_TYPE (elt))
6100 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6104 *plist = placeholder_expr;
6105 return build1 (INDIRECT_REF, need_type, elt);
6112 /* expand_expr: generate code for computing expression EXP.
6113 An rtx for the computed value is returned. The value is never null.
6114 In the case of a void EXP, const0_rtx is returned.
6116 The value may be stored in TARGET if TARGET is nonzero.
6117 TARGET is just a suggestion; callers must assume that
6118 the rtx returned may not be the same as TARGET.
6120 If TARGET is CONST0_RTX, it means that the value will be ignored.
6122 If TMODE is not VOIDmode, it suggests generating the
6123 result in mode TMODE. But this is done only when convenient.
6124 Otherwise, TMODE is ignored and the value generated in its natural mode.
6125 TMODE is just a suggestion; callers must assume that
6126 the rtx returned may not have mode TMODE.
6128 Note that TARGET may have neither TMODE nor MODE. In that case, it
6129 probably will not be used.
6131 If MODIFIER is EXPAND_SUM then when EXP is an addition
6132 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6133 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6134 products as above, or REG or MEM, or constant.
6135 Ordinarily in such cases we would output mul or add instructions
6136 and then return a pseudo reg containing the sum.
6138 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6139 it also marks a label as absolutely required (it can't be dead).
6140 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6141 This is used for outputting expressions used in initializers.
6143 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6144 with a constant address even if that address is not normally legitimate.
6145 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
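/* For instance (illustrative only): expanding the address arithmetic
   for "a[i]" under EXPAND_SUM may legitimately return
   (plus (mult (reg i) (const_int 4)) (symbol_ref "a")) rather than
   forcing the sum into a fresh pseudo register.  */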
6148 expand_expr (exp, target, tmode, modifier)
6151 enum machine_mode tmode;
6152 enum expand_modifier modifier;
6155 tree type = TREE_TYPE (exp);
6156 int unsignedp = TREE_UNSIGNED (type);
6157 enum machine_mode mode;
6158 enum tree_code code = TREE_CODE (exp);
6160 rtx subtarget, original_target;
6164 /* Handle ERROR_MARK before anybody tries to access its type. */
6165 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6167 op0 = CONST0_RTX (tmode);
6173 mode = TYPE_MODE (type);
6174 /* Use subtarget as the target for operand 0 of a binary operation. */
6175 subtarget = get_subtarget (target);
6176 original_target = target;
6177 ignore = (target == const0_rtx
6178 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6179 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6180 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6181 && TREE_CODE (type) == VOID_TYPE));
6183 /* If we are going to ignore this result, we need only do something
6184 if there is a side-effect somewhere in the expression. If there
6185 is, short-circuit the most common cases here. Note that we must
6186 not call expand_expr with anything but const0_rtx in case this
6187 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6191 if (! TREE_SIDE_EFFECTS (exp))
6194 /* Ensure we reference a volatile object even if value is ignored, but
6195 don't do this if all we are doing is taking its address. */
6196 if (TREE_THIS_VOLATILE (exp)
6197 && TREE_CODE (exp) != FUNCTION_DECL
6198 && mode != VOIDmode && mode != BLKmode
6199 && modifier != EXPAND_CONST_ADDRESS)
6201 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6202 if (GET_CODE (temp) == MEM)
6203 temp = copy_to_reg (temp);
6207 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6208 || code == INDIRECT_REF || code == BUFFER_REF)
6209 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6212 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6213 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6215 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6216 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6219 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6220 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6221 /* If the second operand has no side effects, just evaluate
6222 the first.  */
6223 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6225 else if (code == BIT_FIELD_REF)
6227 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6228 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6229 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6236 #ifdef MAX_INTEGER_COMPUTATION_MODE
6237 /* Only check stuff here if the mode we want is different from the mode
6238 of the expression; if it's the same, check_max_integer_computation_mode
6239 will handle it. Do we really need to check this stuff at all? */
6242 && GET_MODE (target) != mode
6243 && TREE_CODE (exp) != INTEGER_CST
6244 && TREE_CODE (exp) != PARM_DECL
6245 && TREE_CODE (exp) != ARRAY_REF
6246 && TREE_CODE (exp) != ARRAY_RANGE_REF
6247 && TREE_CODE (exp) != COMPONENT_REF
6248 && TREE_CODE (exp) != BIT_FIELD_REF
6249 && TREE_CODE (exp) != INDIRECT_REF
6250 && TREE_CODE (exp) != CALL_EXPR
6251 && TREE_CODE (exp) != VAR_DECL
6252 && TREE_CODE (exp) != RTL_EXPR)
6254 enum machine_mode mode = GET_MODE (target);
6256 if (GET_MODE_CLASS (mode) == MODE_INT
6257 && mode > MAX_INTEGER_COMPUTATION_MODE)
6258 internal_error ("unsupported wide integer operation");
6262 && TREE_CODE (exp) != INTEGER_CST
6263 && TREE_CODE (exp) != PARM_DECL
6264 && TREE_CODE (exp) != ARRAY_REF
6265 && TREE_CODE (exp) != ARRAY_RANGE_REF
6266 && TREE_CODE (exp) != COMPONENT_REF
6267 && TREE_CODE (exp) != BIT_FIELD_REF
6268 && TREE_CODE (exp) != INDIRECT_REF
6269 && TREE_CODE (exp) != VAR_DECL
6270 && TREE_CODE (exp) != CALL_EXPR
6271 && TREE_CODE (exp) != RTL_EXPR
6272 && GET_MODE_CLASS (tmode) == MODE_INT
6273 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6274 internal_error ("unsupported wide integer operation");
6276 check_max_integer_computation_mode (exp);
6279 /* If we will do cse, generate all results into pseudo registers
6280 since 1) that allows cse to find more things
6281 and 2) otherwise cse could produce an insn the machine
6282 cannot support. An exception is a CONSTRUCTOR into a multi-word
6283 MEM: that's much more likely to be most efficient into the MEM. */
6285 if (! cse_not_expected && mode != BLKmode && target
6286 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6287 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6294 tree function = decl_function_context (exp);
6295 /* Handle using a label in a containing function. */
6296 if (function != current_function_decl
6297 && function != inline_function_decl && function != 0)
6299 struct function *p = find_function_data (function);
6300 p->expr->x_forced_labels
6301 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6302 p->expr->x_forced_labels);
6306 if (modifier == EXPAND_INITIALIZER)
6307 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6312 temp = gen_rtx_MEM (FUNCTION_MODE,
6313 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6314 if (function != current_function_decl
6315 && function != inline_function_decl && function != 0)
6316 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6321 if (DECL_RTL (exp) == 0)
6323 error_with_decl (exp, "prior parameter's size depends on `%s'");
6324 return CONST0_RTX (mode);
6327 /* ... fall through ... */
6330 /* If a static var's type was incomplete when the decl was written,
6331 but the type is complete now, lay out the decl now. */
6332 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6333 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6335 rtx value = DECL_RTL_IF_SET (exp);
6337 layout_decl (exp, 0);
6339 /* If the RTL was already set, update its mode and memory attributes.  */
6343 PUT_MODE (value, DECL_MODE (exp));
6344 SET_DECL_RTL (exp, 0);
6345 set_mem_attributes (value, exp, 1);
6346 SET_DECL_RTL (exp, value);
6350 /* ... fall through ... */
6354 if (DECL_RTL (exp) == 0)
6357 /* Ensure the variable is marked as used even if it doesn't go through
6358 a parser. If it hasn't been used yet, write out an external definition.  */
6360 if (! TREE_USED (exp))
6362 assemble_external (exp);
6363 TREE_USED (exp) = 1;
6366 /* Show we haven't gotten RTL for this yet. */
6369 /* Handle variables inherited from containing functions. */
6370 context = decl_function_context (exp);
6372 /* We treat inline_function_decl as an alias for the current function
6373 because that is the inline function whose vars, types, etc.
6374 are being merged into the current function.
6375 See expand_inline_function. */
6377 if (context != 0 && context != current_function_decl
6378 && context != inline_function_decl
6379 /* If var is static, we don't need a static chain to access it. */
6380 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6381 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6385 /* Mark as non-local and addressable. */
6386 DECL_NONLOCAL (exp) = 1;
6387 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6389 (*lang_hooks.mark_addressable) (exp);
6390 if (GET_CODE (DECL_RTL (exp)) != MEM)
6392 addr = XEXP (DECL_RTL (exp), 0);
6393 if (GET_CODE (addr) == MEM)
6395 = replace_equiv_address (addr,
6396 fix_lexical_addr (XEXP (addr, 0), exp));
6398 addr = fix_lexical_addr (addr, exp);
6400 temp = replace_equiv_address (DECL_RTL (exp), addr);
6403 /* This is the case of an array whose size is to be determined
6404 from its initializer, while the initializer is still being parsed.  */
6407 else if (GET_CODE (DECL_RTL (exp)) == MEM
6408 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6409 temp = validize_mem (DECL_RTL (exp));
6411 /* If DECL_RTL is memory, we are in the normal case and either
6412 the address is not valid or it is not a register and -fforce-addr
6413 is specified, get the address into a register. */
6415 else if (GET_CODE (DECL_RTL (exp)) == MEM
6416 && modifier != EXPAND_CONST_ADDRESS
6417 && modifier != EXPAND_SUM
6418 && modifier != EXPAND_INITIALIZER
6419 && (! memory_address_p (DECL_MODE (exp),
6420 XEXP (DECL_RTL (exp), 0))
6422 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6423 temp = replace_equiv_address (DECL_RTL (exp),
6424 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6426 /* If we got something, return it. But first, set the alignment
6427 if the address is a register. */
6430 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6431 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6436 /* If the mode of DECL_RTL does not match that of the decl, it
6437 must be a promoted value. We return a SUBREG of the wanted mode,
6438 but mark it so that we know that it was already extended. */
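/* For example, assuming a PROMOTE_MODE target that widens QImode locals
   to SImode registers: DECL_RTL is (reg:SI N) while DECL_MODE is QImode,
   so we hand back (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P
   set, telling later code that the high part already holds a valid
   extension.  */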
6440 if (GET_CODE (DECL_RTL (exp)) == REG
6441 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6443 /* Get the signedness used for this variable. Ensure we get the
6444 same mode we got when the variable was declared. */
6445 if (GET_MODE (DECL_RTL (exp))
6446 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6447 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6450 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6451 SUBREG_PROMOTED_VAR_P (temp) = 1;
6452 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6456 return DECL_RTL (exp);
6459 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6460 TREE_INT_CST_HIGH (exp), mode);
6462 /* ??? If overflow is set, fold will have done an incomplete job,
6463 which can result in (plus xx (const_int 0)), which can get
6464 simplified by validate_replace_rtx during virtual register
6465 instantiation, which can result in unrecognizable insns.
6466 Avoid this by forcing all overflows into registers. */
6467 if (TREE_CONSTANT_OVERFLOW (exp)
6468 && modifier != EXPAND_INITIALIZER)
6469 temp = force_reg (mode, temp);
6474 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6477 /* If optimized, generate immediate CONST_DOUBLE
6478 which will be turned into memory by reload if necessary.
6480 We used to force a register so that loop.c could see it. But
6481 this does not allow gen_* patterns to perform optimizations with
6482 the constants. It also produces two insns in cases like "x = 1.0;".
6483 On most machines, floating-point constants are not permitted in
6484 many insns, so we'd end up copying it to a register in any case.
6486 Now, we do the copying in expand_binop, if appropriate. */
6487 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6488 TYPE_MODE (TREE_TYPE (exp)));
6492 if (! TREE_CST_RTL (exp))
6493 output_constant_def (exp, 1);
6495 /* TREE_CST_RTL probably contains a constant address.
6496 On RISC machines where a constant address isn't valid,
6497 make some insns to get that address into a register. */
6498 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6499 && modifier != EXPAND_CONST_ADDRESS
6500 && modifier != EXPAND_INITIALIZER
6501 && modifier != EXPAND_SUM
6502 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6504 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6505 return replace_equiv_address (TREE_CST_RTL (exp),
6506 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6507 return TREE_CST_RTL (exp);
6509 case EXPR_WITH_FILE_LOCATION:
6512 const char *saved_input_filename = input_filename;
6513 int saved_lineno = lineno;
6514 input_filename = EXPR_WFL_FILENAME (exp);
6515 lineno = EXPR_WFL_LINENO (exp);
6516 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6517 emit_line_note (input_filename, lineno);
6518 /* Possibly avoid switching back and forth here. */
6519 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6520 input_filename = saved_input_filename;
6521 lineno = saved_lineno;
6526 context = decl_function_context (exp);
6528 /* If this SAVE_EXPR was at global context, assume we are an
6529 initialization function and move it into our context.  */
6531 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6533 /* We treat inline_function_decl as an alias for the current function
6534 because that is the inline function whose vars, types, etc.
6535 are being merged into the current function.
6536 See expand_inline_function. */
6537 if (context == current_function_decl || context == inline_function_decl)
6540 /* If this is non-local, handle it. */
6543 /* The following call just exists to abort if the context is
6544 not of a containing function. */
6545 find_function_data (context);
6547 temp = SAVE_EXPR_RTL (exp);
6548 if (temp && GET_CODE (temp) == REG)
6550 put_var_into_stack (exp);
6551 temp = SAVE_EXPR_RTL (exp);
6553 if (temp == 0 || GET_CODE (temp) != MEM)
6556 replace_equiv_address (temp,
6557 fix_lexical_addr (XEXP (temp, 0), exp));
6559 if (SAVE_EXPR_RTL (exp) == 0)
6561 if (mode == VOIDmode)
6564 temp = assign_temp (build_qualified_type (type,
6566 | TYPE_QUAL_CONST)),
6569 SAVE_EXPR_RTL (exp) = temp;
6570 if (!optimize && GET_CODE (temp) == REG)
6571 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6574 /* If the mode of TEMP does not match that of the expression, it
6575 must be a promoted value. We pass store_expr a SUBREG of the
6576 wanted mode but mark it so that we know that it was already
6577 extended. Note that `unsignedp' was modified above in this case.  */
6580 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6582 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6583 SUBREG_PROMOTED_VAR_P (temp) = 1;
6584 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6587 if (temp == const0_rtx)
6588 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6590 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6592 TREE_USED (exp) = 1;
6595 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6596 must be a promoted value. We return a SUBREG of the wanted mode,
6597 but mark it so that we know that it was already extended. */
6599 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6600 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6602 /* Compute the signedness and make the proper SUBREG. */
6603 promote_mode (type, mode, &unsignedp, 0);
6604 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6605 SUBREG_PROMOTED_VAR_P (temp) = 1;
6606 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6610 return SAVE_EXPR_RTL (exp);
6615 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6616 TREE_OPERAND (exp, 0)
6617 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6621 case PLACEHOLDER_EXPR:
6623 tree old_list = placeholder_list;
6624 tree placeholder_expr = 0;
6626 exp = find_placeholder (exp, &placeholder_expr);
6630 placeholder_list = TREE_CHAIN (placeholder_expr);
6631 temp = expand_expr (exp, original_target, tmode, modifier);
6632 placeholder_list = old_list;
6636 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6639 case WITH_RECORD_EXPR:
6640 /* Put the object on the placeholder list, expand our first operand,
6641 and pop the list. */
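/* The placeholder list behaves as a stack: any PLACEHOLDER_EXPR inside
   operand 0 (e.g. a self-referential size in an Ada-style record) is
   resolved against the object pushed here, and popping afterward keeps
   outer placeholders unaffected.  */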
6642 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6644 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6646 placeholder_list = TREE_CHAIN (placeholder_list);
6650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6651 expand_goto (TREE_OPERAND (exp, 0));
6653 expand_computed_goto (TREE_OPERAND (exp, 0));
6657 expand_exit_loop_if_false (NULL,
6658 invert_truthvalue (TREE_OPERAND (exp, 0)));
6661 case LABELED_BLOCK_EXPR:
6662 if (LABELED_BLOCK_BODY (exp))
6663 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6664 /* Should perhaps use expand_label, but this is simpler and safer. */
6665 do_pending_stack_adjust ();
6666 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6669 case EXIT_BLOCK_EXPR:
6670 if (EXIT_BLOCK_RETURN (exp))
6671 sorry ("returned value in block_exit_expr");
6672 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6677 expand_start_loop (1);
6678 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6686 tree vars = TREE_OPERAND (exp, 0);
6687 int vars_need_expansion = 0;
6689 /* Need to open a binding contour here because
6690 if there are any cleanups they must be contained here. */
6691 expand_start_bindings (2);
6693 /* Mark the corresponding BLOCK for output in its proper place. */
6694 if (TREE_OPERAND (exp, 2) != 0
6695 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6696 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6698 /* If VARS have not yet been expanded, expand them now. */
6701 if (!DECL_RTL_SET_P (vars))
6703 vars_need_expansion = 1;
6706 expand_decl_init (vars);
6707 vars = TREE_CHAIN (vars);
6710 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6712 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6718 if (RTL_EXPR_SEQUENCE (exp))
6720 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6722 emit_insns (RTL_EXPR_SEQUENCE (exp));
6723 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6725 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6726 free_temps_for_rtl_expr (exp);
6727 return RTL_EXPR_RTL (exp);
6730 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
6736 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6737 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6742 /* All elts simple constants => refer to a constant in memory. But
6743 if this is a non-BLKmode mode, let it store a field at a time
6744 since that should make a CONST_INT or CONST_DOUBLE when we
6745 fold. Likewise, if we have a target we can use, it is best to
6746 store directly into the target unless the type is large enough
6747 that memcpy will be used. If we are making an initializer and
6748 all operands are constant, put it in memory as well.
6750 FIXME: Avoid trying to fill vector constructors piece-meal.
6751 Output them with output_constant_def below unless we're sure
6752 they're zeros. This should go away when vector initializers
6753 are treated like VECTOR_CST instead of arrays.  */
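/* For example, a static, fully-constant aggregate such as
   `static int t[4] = {1, 2, 3, 4};' is best emitted once into memory via
   output_constant_def, while a small, mostly-zero automatic initializer
   is cheaper to build in place with store_constructor below.  */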
6755 else if ((TREE_STATIC (exp)
6756 && ((mode == BLKmode
6757 && ! (target != 0 && safe_from_p (target, exp, 1)))
6758 || TREE_ADDRESSABLE (exp)
6759 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6760 && (! MOVE_BY_PIECES_P
6761 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6763 && ((TREE_CODE (type) == VECTOR_TYPE
6764 && !is_zeros_p (exp))
6765 || ! mostly_zeros_p (exp)))))
6766 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6768 rtx constructor = output_constant_def (exp, 1);
6770 if (modifier != EXPAND_CONST_ADDRESS
6771 && modifier != EXPAND_INITIALIZER
6772 && modifier != EXPAND_SUM)
6773 constructor = validize_mem (constructor);
6779 /* Handle calls that pass values in multiple non-contiguous
6780 locations. The Irix 6 ABI has examples of this. */
6781 if (target == 0 || ! safe_from_p (target, exp, 1)
6782 || GET_CODE (target) == PARALLEL)
6784 = assign_temp (build_qualified_type (type,
6786 | (TREE_READONLY (exp)
6787 * TYPE_QUAL_CONST))),
6788 0, TREE_ADDRESSABLE (exp), 1);
6790 store_constructor (exp, target, 0,
6791 int_size_in_bytes (TREE_TYPE (exp)));
6797 tree exp1 = TREE_OPERAND (exp, 0);
6799 tree string = string_constant (exp1, &index);
6801 /* Try to optimize reads from const strings. */
6803 && TREE_CODE (string) == STRING_CST
6804 && TREE_CODE (index) == INTEGER_CST
6805 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6806 && GET_MODE_CLASS (mode) == MODE_INT
6807 && GET_MODE_SIZE (mode) == 1
6808 && modifier != EXPAND_WRITE)
6809 return gen_int_mode (TREE_STRING_POINTER (string)
6810 [TREE_INT_CST_LOW (index)], mode);
6812 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6813 op0 = memory_address (mode, op0);
6814 temp = gen_rtx_MEM (mode, op0);
6815 set_mem_attributes (temp, exp, 0);
6817 /* If we are writing to this object and its type is a record with
6818 readonly fields, we must mark it as readonly so it will
6819 conflict with readonly references to those fields. */
6820 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6821 RTX_UNCHANGING_P (temp) = 1;
6827 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6831 tree array = TREE_OPERAND (exp, 0);
6832 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6833 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6834 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6837 /* Optimize the special-case of a zero lower bound.
6839 We convert the low_bound to sizetype to avoid some problems
6840 with constant folding. (E.g. suppose the lower bound is 1,
6841 and its mode is QI. Without the conversion, (ARRAY
6842 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6843 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6845 if (! integer_zerop (low_bound))
6846 index = size_diffop (index, convert (sizetype, low_bound));
6848 /* Fold an expression like: "foo"[2].
6849 This is not done in fold so it won't happen inside &.
6850 Don't fold if this is for wide characters since it's too
6851 difficult to do correctly and this is a very rare case. */
6853 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6854 && TREE_CODE (array) == STRING_CST
6855 && TREE_CODE (index) == INTEGER_CST
6856 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6857 && GET_MODE_CLASS (mode) == MODE_INT
6858 && GET_MODE_SIZE (mode) == 1)
6859 return gen_int_mode (TREE_STRING_POINTER (array)
6860 [TREE_INT_CST_LOW (index)], mode);
6862 /* If this is a constant index into a constant array,
6863 just get the value from the array. Handle both the cases when
6864 we have an explicit constructor and when our operand is a variable
6865 that was declared const. */
6867 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6868 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6869 && TREE_CODE (index) == INTEGER_CST
6870 && 0 > compare_tree_int (index,
6871 list_length (CONSTRUCTOR_ELTS
6872 (TREE_OPERAND (exp, 0)))))
6876 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6877 i = TREE_INT_CST_LOW (index);
6878 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6882 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6886 else if (optimize >= 1
6887 && modifier != EXPAND_CONST_ADDRESS
6888 && modifier != EXPAND_INITIALIZER
6889 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6890 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6891 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6893 if (TREE_CODE (index) == INTEGER_CST)
6895 tree init = DECL_INITIAL (array);
6897 if (TREE_CODE (init) == CONSTRUCTOR)
6901 for (elem = CONSTRUCTOR_ELTS (init);
6903 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6904 elem = TREE_CHAIN (elem))
6907 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6908 return expand_expr (fold (TREE_VALUE (elem)), target,
6911 else if (TREE_CODE (init) == STRING_CST
6912 && 0 > compare_tree_int (index,
6913 TREE_STRING_LENGTH (init)))
6915 tree type = TREE_TYPE (TREE_TYPE (init));
6916 enum machine_mode mode = TYPE_MODE (type);
6918 if (GET_MODE_CLASS (mode) == MODE_INT
6919 && GET_MODE_SIZE (mode) == 1)
6920 return gen_int_mode (TREE_STRING_POINTER (init)
6921 [TREE_INT_CST_LOW (index)], mode);
6930 case ARRAY_RANGE_REF:
6931 /* If the operand is a CONSTRUCTOR, we can just extract the
6932 appropriate field if it is present. Don't do this if we have
6933 already written the data since we want to refer to that copy
6934 and varasm.c assumes that's what we'll do. */
6935 if (code == COMPONENT_REF
6936 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6937 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6941 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6942 elt = TREE_CHAIN (elt))
6943 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6944 /* We can normally use the value of the field in the
6945 CONSTRUCTOR. However, if this is a bitfield in
6946 an integral mode that we can fit in a HOST_WIDE_INT,
6947 we must mask only the number of bits in the bitfield,
6948 since this is done implicitly by the constructor. If
6949 the bitfield does not meet either of those conditions,
6950 we can't do this optimization. */
6951 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6952 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6954 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6955 <= HOST_BITS_PER_WIDE_INT))))
6957 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6958 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6960 HOST_WIDE_INT bitsize
6961 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6962 enum machine_mode imode
6963 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6965 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6967 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6968 op0 = expand_and (imode, op0, op1, target);
6973 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6976 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6978 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6988 enum machine_mode mode1;
6989 HOST_WIDE_INT bitsize, bitpos;
6992 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6993 &mode1, &unsignedp, &volatilep);
6996 /* If we got back the original object, something is wrong. Perhaps
6997 we are evaluating an expression too early. In any event, don't
6998 infinitely recurse. */
7002 /* If TEM's type is a union of variable size, pass TARGET to the inner
7003 computation, since it will need a temporary and TARGET is known
7004 to suffice. This occurs in unchecked conversion in Ada.  */
7008 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7009 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7011 ? target : NULL_RTX),
7013 (modifier == EXPAND_INITIALIZER
7014 || modifier == EXPAND_CONST_ADDRESS)
7015 ? modifier : EXPAND_NORMAL);
7017 /* If this is a constant, put it into a register if it is a
7018 legitimate constant and OFFSET is 0 and memory if it isn't. */
7019 if (CONSTANT_P (op0))
7021 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7022 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7024 op0 = force_reg (mode, op0);
7026 op0 = validize_mem (force_const_mem (mode, op0));
7031 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7033 /* If this object is in a register, put it into memory.
7034 This case can't occur in C, but can in Ada if we have
7035 unchecked conversion of an expression from a scalar type to
7036 an array or record type. */
7037 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7038 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7040 /* If the operand is a SAVE_EXPR, we can deal with this by
7041 forcing the SAVE_EXPR into memory. */
7042 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7044 put_var_into_stack (TREE_OPERAND (exp, 0));
7045 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7050 = build_qualified_type (TREE_TYPE (tem),
7051 (TYPE_QUALS (TREE_TYPE (tem))
7052 | TYPE_QUAL_CONST));
7053 rtx memloc = assign_temp (nt, 1, 1, 1);
7055 emit_move_insn (memloc, op0);
7060 if (GET_CODE (op0) != MEM)
7063 #ifdef POINTERS_EXTEND_UNSIGNED
7064 if (GET_MODE (offset_rtx) != Pmode)
7065 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7067 if (GET_MODE (offset_rtx) != ptr_mode)
7068 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7071 /* A constant address in OP0 can have VOIDmode; we must not try
7072 to call force_reg in that case, so avoid it. */
7073 if (GET_CODE (op0) == MEM
7074 && GET_MODE (op0) == BLKmode
7075 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7077 && (bitpos % bitsize) == 0
7078 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7079 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7081 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7085 op0 = offset_address (op0, offset_rtx,
7086 highest_pow2_factor (offset));
7089 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7090 record its alignment as BIGGEST_ALIGNMENT. */
7091 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7092 && is_aligning_offset (offset, tem))
7093 set_mem_align (op0, BIGGEST_ALIGNMENT);
7095 /* Don't forget about volatility even if this is a bitfield. */
7096 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7098 if (op0 == orig_op0)
7099 op0 = copy_rtx (op0);
7101 MEM_VOLATILE_P (op0) = 1;
7104 /* The following code doesn't handle CONCAT.
7105 Assume only bitpos == 0 can be used for CONCAT, due to
7106 one-element arrays having the same mode as their element. */
7107 if (GET_CODE (op0) == CONCAT)
7109 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7114 /* In cases where an aligned union has an unaligned object
7115 as a field, we might be extracting a BLKmode value from
7116 an integer-mode (e.g., SImode) object. Handle this case
7117 by doing the extract into an object as wide as the field
7118 (which we know to be the width of a basic mode), then
7119 storing into memory, and changing the mode to BLKmode. */
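/* Concretely, pulling a BLKmode three-byte field out of an SImode union
   member means extracting the bits into an integer-mode temporary wide
   enough for the field, spilling that temporary to a stack slot, and
   then viewing the slot as BLKmode.  */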
7120 if (mode1 == VOIDmode
7121 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7122 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7123 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7124 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7125 && modifier != EXPAND_CONST_ADDRESS
7126 && modifier != EXPAND_INITIALIZER)
7127 /* If the field isn't aligned enough to fetch as a memref,
7128 fetch it as a bit field. */
7129 || (mode1 != BLKmode
7130 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7131 && ((TYPE_ALIGN (TREE_TYPE (tem))
7132 < GET_MODE_ALIGNMENT (mode))
7133 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7134 /* If the type and the field are a constant size and the
7135 size of the type isn't the same size as the bitfield,
7136 we must use bitfield operations. */
7138 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7140 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7143 enum machine_mode ext_mode = mode;
7145 if (ext_mode == BLKmode
7146 && ! (target != 0 && GET_CODE (op0) == MEM
7147 && GET_CODE (target) == MEM
7148 && bitpos % BITS_PER_UNIT == 0))
7149 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7151 if (ext_mode == BLKmode)
7153 /* In this case, BITPOS must start at a byte boundary and
7154 TARGET, if specified, must be a MEM. */
7155 if (GET_CODE (op0) != MEM
7156 || (target != 0 && GET_CODE (target) != MEM)
7157 || bitpos % BITS_PER_UNIT != 0)
7160 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7162 target = assign_temp (type, 0, 1, 1);
7164 emit_block_move (target, op0,
7165 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7171 op0 = validize_mem (op0);
7173 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7174 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7176 op0 = extract_bit_field (op0, bitsize, bitpos,
7177 unsignedp, target, ext_mode, ext_mode,
7178 int_size_in_bytes (TREE_TYPE (tem)));
7180 /* If the result is a record type and BITSIZE is narrower than
7181 the mode of OP0, an integral mode, and this is a big endian
7182 machine, we must put the field into the high-order bits. */
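/* E.g. on a 32-bit big-endian target, an 8-bit field extracted into an
   SImode register lands in the low-order bits, so for a record result we
   shift it left by 24 to occupy the high-order end where the memory
   layout expects it.  */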
7183 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7184 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7185 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7186 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7187 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7191 if (mode == BLKmode)
7193 rtx new = assign_temp (build_qualified_type
7194 ((*lang_hooks.types.type_for_mode)
7196 TYPE_QUAL_CONST), 0, 1, 1);
7198 emit_move_insn (new, op0);
7199 op0 = copy_rtx (new);
7200 PUT_MODE (op0, BLKmode);
7201 set_mem_attributes (op0, exp, 1);
7207 /* If the result is BLKmode, use that to access the object now as well.  */
7209 if (mode == BLKmode)
7212 /* Get a reference to just this component. */
7213 if (modifier == EXPAND_CONST_ADDRESS
7214 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7215 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7217 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7219 if (op0 == orig_op0)
7220 op0 = copy_rtx (op0);
7222 set_mem_attributes (op0, exp, 0);
7223 if (GET_CODE (XEXP (op0, 0)) == REG)
7224 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7226 MEM_VOLATILE_P (op0) |= volatilep;
7227 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7228 || modifier == EXPAND_CONST_ADDRESS
7229 || modifier == EXPAND_INITIALIZER)
7231 else if (target == 0)
7232 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7234 convert_move (target, op0, unsignedp);
7240 rtx insn, before = get_last_insn (), vtbl_ref;
7242 /* Evaluate the interior expression. */
7243 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7246 /* Get or create an instruction off which to hang a note. */
7247 if (REG_P (subtarget))
7250 insn = get_last_insn ();
7253 if (! INSN_P (insn))
7254 insn = prev_nonnote_insn (insn);
7258 target = gen_reg_rtx (GET_MODE (subtarget));
7259 insn = emit_move_insn (target, subtarget);
7262 /* Collect the data for the note. */
7263 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7264 vtbl_ref = plus_constant (vtbl_ref,
7265 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7266 /* Discard the initial CONST that was added. */
7267 vtbl_ref = XEXP (vtbl_ref, 0);
7270 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7275 /* Intended for a reference to a buffer of a file-object in Pascal.
7276 But it's not certain that a special tree code will really be
7277 necessary for these. INDIRECT_REF might work for them. */
7283 /* Pascal set IN expression.
7286 rlo = set_low - (set_low%bits_per_word);
7287 the_word = set [ (index - rlo)/bits_per_word ];
7288 bit_index = index % bits_per_word;
7289 bitmask = 1 << bit_index;
7290 return !!(the_word & bitmask); */
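/* Worked example, assuming bits_per_word == 8: for set_low == 3 and
   index == 13, rlo = 3 - (3 % 8) = 0, the_word = set[(13 - 0) / 8]
   = set[1], bit_index = 13 % 8 = 5, and bitmask = 1 << 5 = 0x20, so the
   test reads bit 5 of the set's second byte.  */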
7292 tree set = TREE_OPERAND (exp, 0);
7293 tree index = TREE_OPERAND (exp, 1);
7294 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7295 tree set_type = TREE_TYPE (set);
7296 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7297 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7298 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7299 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7300 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7301 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7302 rtx setaddr = XEXP (setval, 0);
7303 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7305 rtx diff, quo, rem, addr, bit, result;
7307 /* If domain is empty, answer is no. Likewise if index is constant
7308 and out of bounds. */
7309 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7310 && TREE_CODE (set_low_bound) == INTEGER_CST
7311 && tree_int_cst_lt (set_high_bound, set_low_bound))
7312 || (TREE_CODE (index) == INTEGER_CST
7313 && TREE_CODE (set_low_bound) == INTEGER_CST
7314 && tree_int_cst_lt (index, set_low_bound))
7315 || (TREE_CODE (set_high_bound) == INTEGER_CST
7316 && TREE_CODE (index) == INTEGER_CST
7317 && tree_int_cst_lt (set_high_bound, index))))
7321 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7323 /* If we get here, we have to generate the code for both cases
7324 (in range and out of range). */
7326 op0 = gen_label_rtx ();
7327 op1 = gen_label_rtx ();
7329 if (! (GET_CODE (index_val) == CONST_INT
7330 && GET_CODE (lo_r) == CONST_INT))
7331 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7332 GET_MODE (index_val), iunsignedp, op1);
7334 if (! (GET_CODE (index_val) == CONST_INT
7335 && GET_CODE (hi_r) == CONST_INT))
7336 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7337 GET_MODE (index_val), iunsignedp, op1);
7339 /* Calculate the element number of bit zero in the first word of the set.  */
7341 if (GET_CODE (lo_r) == CONST_INT)
7342 rlow = GEN_INT (INTVAL (lo_r)
7343 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7345 rlow = expand_binop (index_mode, and_optab, lo_r,
7346 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7349 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7350 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7352 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7353 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7354 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7355 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7357 addr = memory_address (byte_mode,
7358 expand_binop (index_mode, add_optab, diff,
7359 setaddr, NULL_RTX, iunsignedp,
7362 /* Extract the bit we want to examine. */
7363 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7364 gen_rtx_MEM (byte_mode, addr),
7365 make_tree (TREE_TYPE (index), rem),
7367 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7368 GET_MODE (target) == byte_mode ? target : 0,
7369 1, OPTAB_LIB_WIDEN);
7371 if (result != target)
7372 convert_move (target, result, 1);
7374 /* Output the code to handle the out-of-range case. */
7377 emit_move_insn (target, const0_rtx);
7382 case WITH_CLEANUP_EXPR:
7383 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7385 WITH_CLEANUP_EXPR_RTL (exp)
7386 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7387 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7388 CLEANUP_EH_ONLY (exp));
7390 /* That's it for this cleanup. */
7391 TREE_OPERAND (exp, 1) = 0;
7393 return WITH_CLEANUP_EXPR_RTL (exp);
7395 case CLEANUP_POINT_EXPR:
7397 /* Start a new binding layer that will keep track of all cleanup
7398 actions to be performed. */
7399 expand_start_bindings (2);
7401 target_temp_slot_level = temp_slot_level;
7403 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7404 /* If we're going to use this value, load it up now. */
7406 op0 = force_not_mem (op0);
7407 preserve_temp_slots (op0);
7408 expand_end_bindings (NULL_TREE, 0, 0);
7413 /* Check for a built-in function. */
7414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7415 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7417 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7419 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7420 == BUILT_IN_FRONTEND)
7421 return (*lang_hooks.expand_expr)
7422 (exp, original_target, tmode, modifier);
7424 return expand_builtin (exp, target, subtarget, tmode, ignore);
7427 return expand_call (exp, target, ignore);
7429 case NON_LVALUE_EXPR:
7432 case REFERENCE_EXPR:
7433 if (TREE_OPERAND (exp, 0) == error_mark_node)
7436 if (TREE_CODE (type) == UNION_TYPE)
7438 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7440 /* If both input and output are BLKmode, this conversion isn't doing
7441 anything except possibly changing memory attributes. */
7442 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7444 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7447 result = copy_rtx (result);
7448 set_mem_attributes (result, exp, 0);
7453 target = assign_temp (type, 0, 1, 1);
7455 if (GET_CODE (target) == MEM)
7456 /* Store data into beginning of memory target. */
7457 store_expr (TREE_OPERAND (exp, 0),
7458 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7460 else if (GET_CODE (target) == REG)
7461 /* Store this field into a union of the proper type. */
7462 store_field (target,
7463 MIN ((int_size_in_bytes (TREE_TYPE
7464 (TREE_OPERAND (exp, 0)))
7466 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7467 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7468 VOIDmode, 0, type, 0);
7472 /* Return the entire union. */
7476 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7481 /* If the signedness of the conversion differs and OP0 is
7482 a promoted SUBREG, clear that indication since we now
7483 have to do the proper extension. */
7484 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7485 && GET_CODE (op0) == SUBREG)
7486 SUBREG_PROMOTED_VAR_P (op0) = 0;
7491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7492 if (GET_MODE (op0) == mode)
7495 /* If OP0 is a constant, just convert it into the proper mode. */
7496 if (CONSTANT_P (op0))
7498 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7499 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7501 if (modifier == EXPAND_INITIALIZER)
7502 return simplify_gen_subreg (mode, op0, inner_mode,
7503 subreg_lowpart_offset (mode,
7506 return convert_modes (mode, inner_mode, op0,
7507 TREE_UNSIGNED (inner_type));
7510 if (modifier == EXPAND_INITIALIZER)
7511 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7515 convert_to_mode (mode, op0,
7516 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7518 convert_move (target, op0,
7519 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7522 case VIEW_CONVERT_EXPR:
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7525 /* If the input and output modes are both the same, we are done.
7526 Otherwise, if neither mode is BLKmode and both are within a word, we
7527 can use gen_lowpart. If neither is true, make sure the operand is
7528 in memory and convert the MEM to the new mode. */
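/* For example, viewing a float as a 32-bit integer (SFmode and SImode
   both fit in a word on most targets) goes through gen_lowpart, whereas
   viewing a small structure as an integer of a different mode spills
   the operand to memory and re-reads it in the new mode.  */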
7529 if (TYPE_MODE (type) == GET_MODE (op0))
7531 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7532 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7533 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7534 op0 = gen_lowpart (TYPE_MODE (type), op0);
7535 else if (GET_CODE (op0) != MEM)
7537 /* If the operand is not a MEM, force it into memory. Since we
7538 are going to be changing the mode of the MEM, don't call
7539 force_const_mem for constants because we don't allow pool
7540 constants to change mode. */
7541 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7543 if (TREE_ADDRESSABLE (exp))
7546 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7548 = assign_stack_temp_for_type
7549 (TYPE_MODE (inner_type),
7550 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7552 emit_move_insn (target, op0);
7556 /* At this point, OP0 is in the correct mode. If the output type is such
7557 that the operand is known to be aligned, indicate that it is.
7558 Otherwise, we need only be concerned about alignment for non-BLKmode results.  */
7560 if (GET_CODE (op0) == MEM)
7562 op0 = copy_rtx (op0);
7564 if (TYPE_ALIGN_OK (type))
7565 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7566 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7567 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7569 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7570 HOST_WIDE_INT temp_size
7571 = MAX (int_size_in_bytes (inner_type),
7572 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7573 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7574 temp_size, 0, type);
7575 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7577 if (TREE_ADDRESSABLE (exp))
7580 if (GET_MODE (op0) == BLKmode)
7581 emit_block_move (new_with_op0_mode, op0,
7582 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7584 emit_move_insn (new_with_op0_mode, op0);
7589 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7595 /* We come here from MINUS_EXPR when the second operand is a constant.  */
7598 this_optab = ! unsignedp && flag_trapv
7599 && (GET_MODE_CLASS (mode) == MODE_INT)
7600 ? addv_optab : add_optab;
7602 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7603 something else, make sure we add the register to the constant and
7604 then to the other thing. This case can occur during strength
7605 reduction and doing it this way will produce better code if the
7606 frame pointer or argument pointer is eliminated.
7608 fold-const.c will ensure that the constant is always in the inner
7609 PLUS_EXPR, so the only case we need to do anything about is if
7610 sp, ap, or fp is our second argument, in which case we must swap
7611 the innermost first argument and our second argument. */
7613 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7614 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7615 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7616 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7617 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7618 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7620 tree t = TREE_OPERAND (exp, 1);
7622 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7623 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7626 /* If the result is to be ptr_mode and we are adding an integer to
7627 something, we might be forming a constant. So try to use
7628 plus_constant. If it produces a sum and we can't accept it,
7629 use force_operand. This allows P = &ARR[const] to generate
7630 efficient code on machines where a SYMBOL_REF is not a valid address.
7633 If this is an EXPAND_SUM call, always return the sum. */
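/* E.g. for `&arr[10]' with 4-byte elements, plus_constant folds the
   SYMBOL_REF for `arr' and CONST_INT 40 into a single
   (const (plus (symbol_ref "arr") (const_int 40))); force_operand is
   used only when such an address form cannot be accepted directly.  */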
7634 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7635 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7637 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7638 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7639 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7643 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7645 /* Use immed_double_const to ensure that the constant is
7646 truncated according to the mode of OP1, then sign extended
7647 to a HOST_WIDE_INT. Using the constant directly can result
7648 in non-canonical RTL in a 64x32 cross compile. */
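/* An instance of the 64x32 hazard: the 32-bit constant 0xffffffff read
   on a 64-bit host would otherwise become (const_int 0xffffffff) rather
   than the canonical sign-extended (const_int -1) for SImode.  */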
7650 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7652 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7653 op1 = plus_constant (op1, INTVAL (constant_part));
7654 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7655 op1 = force_operand (op1, target);
7659 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7660 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7661 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7665 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7666 (modifier == EXPAND_INITIALIZER
7667 ? EXPAND_INITIALIZER : EXPAND_SUM));
7668 if (! CONSTANT_P (op0))
7670 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7671 VOIDmode, modifier);
7672 /* Don't go to both_summands if modifier
7673 says it's not right to return a PLUS. */
7674 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7678 /* Use immed_double_const to ensure that the constant is
7679 truncated according to the mode of OP1, then sign extended
7680 to a HOST_WIDE_INT. Using the constant directly can result
7681 in non-canonical RTL in a 64x32 cross compile. */
7683 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7685 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7686 op0 = plus_constant (op0, INTVAL (constant_part));
7687 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7688 op0 = force_operand (op0, target);
7693 /* No sense saving up arithmetic to be done
7694 if it's all in the wrong mode to form part of an address.
7695 And force_operand won't know whether to sign-extend or zero-extend.  */
7697 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7698 || mode != ptr_mode)
7701 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7705 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7708 /* Make sure any term that's a sum with a constant comes last. */
7709 if (GET_CODE (op0) == PLUS
7710 && CONSTANT_P (XEXP (op0, 1)))
7716 /* If adding to a sum including a constant,
7717 associate it to put the constant outside. */
7718 if (GET_CODE (op1) == PLUS
7719 && CONSTANT_P (XEXP (op1, 1)))
7721 rtx constant_term = const0_rtx;
7723 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7726 /* Ensure that MULT comes first if there is one. */
7727 else if (GET_CODE (op0) == MULT)
7728 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7730 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7732 /* Let's also eliminate constants from op0 if possible. */
7733 op0 = eliminate_constant_term (op0, &constant_term);
7735 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7736 their sum should be a constant. Form it into OP1, since the
7737 result we want will then be OP0 + OP1. */
7739 temp = simplify_binary_operation (PLUS, mode, constant_term,
7744 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7747 /* Put a constant term last and put a multiplication first. */
7748 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7749 temp = op1, op1 = op0, op0 = temp;
7751 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7752 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7755 /* For initializers, we are allowed to return a MINUS of two
7756 symbolic constants. Here we handle all cases when both operands are constant.  */
7758 /* Handle difference of two symbolic constants,
7759 for the sake of an initializer. */
7760 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7761 && really_constant_p (TREE_OPERAND (exp, 0))
7762 && really_constant_p (TREE_OPERAND (exp, 1)))
7764 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7766 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7769 /* If the last operand is a CONST_INT, use plus_constant of
7770 the negated constant. Else make the MINUS. */
7771 if (GET_CODE (op1) == CONST_INT)
7772 return plus_constant (op0, - INTVAL (op1));
7774 return gen_rtx_MINUS (mode, op0, op1);
7776 /* Convert A - const to A + (-const). */
7777 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7779 tree negated = fold (build1 (NEGATE_EXPR, type,
7780 TREE_OPERAND (exp, 1)));
7782 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7783 /* If we can't negate the constant in TYPE, leave it alone and
7784 expand_binop will negate it for us. We used to try to do it
7785 here in the signed version of TYPE, but that doesn't work
7786 on POINTER_TYPEs. */;
7789 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7793 this_optab = ! unsignedp && flag_trapv
7794 && (GET_MODE_CLASS(mode) == MODE_INT)
7795 ? subv_optab : sub_optab;
7799 /* If first operand is constant, swap them.
7800 Thus the following special case checks need only
7801 check the second operand. */
7802 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7804 tree t1 = TREE_OPERAND (exp, 0);
7805 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7806 TREE_OPERAND (exp, 1) = t1;
7809 /* Attempt to return something suitable for generating an
7810 indexed address, for machines that support that. */
7812 if (modifier == EXPAND_SUM && mode == ptr_mode
7813 && host_integerp (TREE_OPERAND (exp, 1), 0))
7815 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7818 /* If we knew for certain that this is arithmetic for an array
7819 reference, and we knew the bounds of the array, then we could
7820 apply the distributive law across (PLUS X C) for constant C.
7821 Without such knowledge, we risk overflowing the computation
7822 when both X and C are large, but X+C isn't. */
7823 /* ??? Could perhaps special-case EXP being unsigned and C being
7824 positive. In that case we are certain that X+C is no smaller
7825 than X and so the transformed expression will overflow iff the
7826 original would have. */
7828 if (GET_CODE (op0) != REG)
7829 op0 = force_operand (op0, NULL_RTX);
7830 if (GET_CODE (op0) != REG)
7831 op0 = copy_to_mode_reg (mode, op0);
7834 gen_rtx_MULT (mode, op0,
7835 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7838 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7841 /* Check for multiplying things that have been extended
7842 from a narrower type. If this machine supports multiplying
7843 in that narrower type with a result in the desired type,
7844 do it that way, and avoid the explicit type-conversion. */
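/* For example, assuming the target provides a mulhisi-style widening
   multiply pattern: `(int) (short) a * (int) (short) b' can be emitted
   as one HImode x HImode -> SImode multiply instead of two sign
   extensions followed by a full SImode multiply.  */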
7845 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7846 && TREE_CODE (type) == INTEGER_TYPE
7847 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7848 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7849 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7850 && int_fits_type_p (TREE_OPERAND (exp, 1),
7851 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7852 /* Don't use a widening multiply if a shift will do. */
7853 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7854 > HOST_BITS_PER_WIDE_INT)
7855 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7857 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7858 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7860 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7861 /* If both operands are extended, they must either both
7862 be zero-extended or both be sign-extended. */
7863 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7865 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7867 enum machine_mode innermode
7868 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7869 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7870 ? smul_widen_optab : umul_widen_optab);
7871 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7872 ? umul_widen_optab : smul_widen_optab);
7873 if (mode == GET_MODE_WIDER_MODE (innermode))
7875 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7877 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7878 NULL_RTX, VOIDmode, 0);
7879 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7883 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7884 NULL_RTX, VOIDmode, 0);
7887 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7888 && innermode == word_mode)
7891 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7892 NULL_RTX, VOIDmode, 0);
7893 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7894 op1 = convert_modes (innermode, mode,
7895 expand_expr (TREE_OPERAND (exp, 1),
7896 NULL_RTX, VOIDmode, 0),
7899 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7900 NULL_RTX, VOIDmode, 0);
7901 temp = expand_binop (mode, other_optab, op0, op1, target,
7902 unsignedp, OPTAB_LIB_WIDEN);
7903 htem = expand_mult_highpart_adjust (innermode,
7904 gen_highpart (innermode, temp),
7906 gen_highpart (innermode, temp),
7908 emit_move_insn (gen_highpart (innermode, temp), htem);
7913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7915 return expand_mult (mode, op0, op1, target, unsignedp);
7917 case TRUNC_DIV_EXPR:
7918 case FLOOR_DIV_EXPR:
7920 case ROUND_DIV_EXPR:
7921 case EXACT_DIV_EXPR:
7922 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7924 /* Possible optimization: compute the dividend with EXPAND_SUM;
7925 then, if the divisor is constant, we can optimize the case
7926 where some terms of the dividend have coefficients divisible by it. */
7927 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7928 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7929 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7932 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving an
7933 expensive divide. If not, combine will rebuild the original computation.  */
7935 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7936 && TREE_CODE (type) == REAL_TYPE
7937 && !real_onep (TREE_OPERAND (exp, 0)))
7938 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7939 build (RDIV_EXPR, type,
7940 build_real (type, dconst1),
7941 TREE_OPERAND (exp, 1))),
7942 target, tmode, unsignedp);
7943 this_optab = sdiv_optab;
7946 case TRUNC_MOD_EXPR:
7947 case FLOOR_MOD_EXPR:
7949 case ROUND_MOD_EXPR:
7950 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7952 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7953 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7954 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7956 case FIX_ROUND_EXPR:
7957 case FIX_FLOOR_EXPR:
7959 abort (); /* Not used for C. */
7961 case FIX_TRUNC_EXPR:
7962 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7964 target = gen_reg_rtx (mode);
7965 expand_fix (target, op0, unsignedp);
7969 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7971 target = gen_reg_rtx (mode);
7972 /* expand_float can't figure out what to do if FROM has VOIDmode.
7973 So give it the correct mode. With -O, cse will optimize this. */
7974 if (GET_MODE (op0) == VOIDmode)
7975 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7977 expand_float (target, op0,
7978 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7983 temp = expand_unop (mode,
7984 ! unsignedp && flag_trapv
7985 && (GET_MODE_CLASS(mode) == MODE_INT)
7986 ? negv_optab : neg_optab, op0, target, 0);
7992 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7994 /* Handle complex values specially. */
7995 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7996 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7997 return expand_complex_abs (mode, op0, target, unsignedp);
7999 /* Unsigned abs is simply the operand. Testing here means we don't
8000 risk generating incorrect code below. */
8001 if (TREE_UNSIGNED (type))
8004 return expand_abs (mode, op0, target, unsignedp,
8005 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8009 target = original_target;
8010 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8011 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8012 || GET_MODE (target) != mode
8013 || (GET_CODE (target) == REG
8014 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8015 target = gen_reg_rtx (mode);
8016 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8017 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8019 /* First try to do it with a special MIN or MAX instruction.
8020 If that does not win, use a conditional jump to select the proper value.  */
8022 this_optab = (TREE_UNSIGNED (type)
8023 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8024 : (code == MIN_EXPR ? smin_optab : smax_optab));
8026 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8031 /* At this point, a MEM target is no longer useful; we will get better code elsewhere.  */
8034 if (GET_CODE (target) == MEM)
8035 target = gen_reg_rtx (mode);
8038 emit_move_insn (target, op0);
8040 op0 = gen_label_rtx ();
8042 /* If this mode is an integer too wide to compare properly,
8043 compare word by word. Rely on cse to optimize constant cases. */
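/* Sketch of the fallback: for a DImode MIN/MAX on a 32-bit target with
   no DImode branch, do_jump_by_parts_greater_rtx compares the high
   words first and falls through to the low words only on equality,
   like a multiword compare done by hand.  */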
8044 if (GET_MODE_CLASS (mode) == MODE_INT
8045 && ! can_compare_p (GE, mode, ccp_jump))
8047 if (code == MAX_EXPR)
8048 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8049 target, op1, NULL_RTX, op0);
8051 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8052 op1, target, NULL_RTX, op0);
8056 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8057 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8058 unsignedp, mode, NULL_RTX, NULL_RTX,
8061 emit_move_insn (target, op1);
8066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8067 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8073 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8074 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8079 /* ??? Can optimize bitwise operations with one arg constant.
8080 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8081 and (a bitwise1 b) bitwise2 b (etc)
8082 but that is probably not worth while. */
8084 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8085 boolean values when we want in all cases to compute both of them. In
8086 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8087 as actual zero-or-1 values and then bitwise anding. In cases where
8088 there cannot be any side effects, better code would be made by
8089 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8090 how to recognize those cases. */
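/* E.g. `a && b' (TRUTH_ANDIF_EXPR) must skip evaluating `b' when `a' is
   false, whereas TRUTH_AND_EXPR -- like `a & b' on zero-or-one values --
   may compute both operands and AND the results, which is what the
   bitwise path below does.  */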
8092 case TRUTH_AND_EXPR:
8094 this_optab = and_optab;
8099 this_optab = ior_optab;
8102 case TRUTH_XOR_EXPR:
8104 this_optab = xor_optab;
8111 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8113 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8114 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8117 /* Could determine the answer when only additive constants differ. Also,
8118 the addition of one can be handled by changing the condition. */
8125 case UNORDERED_EXPR:
8132 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8136 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8137 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8139 && GET_CODE (original_target) == REG
8140 && (GET_MODE (original_target)
8141 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8143 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8146 /* If temp is constant, we can just compute the result. */
8147 if (GET_CODE (temp) == CONST_INT)
8149 if (INTVAL (temp) != 0)
8150 emit_move_insn (target, const1_rtx);
8152 emit_move_insn (target, const0_rtx);
8157 if (temp != original_target)
8159 enum machine_mode mode1 = GET_MODE (temp);
8160 if (mode1 == VOIDmode)
8161 mode1 = tmode != VOIDmode ? tmode : mode;
8163 temp = copy_to_mode_reg (mode1, temp);
8166 op1 = gen_label_rtx ();
8167 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8168 GET_MODE (temp), unsignedp, op1);
8169 emit_move_insn (temp, const1_rtx);
8174 /* If no set-flag instruction, must generate a conditional
8175 store into a temporary variable. Drop through
8176 and handle this like && and ||. */
8178 case TRUTH_ANDIF_EXPR:
8179 case TRUTH_ORIF_EXPR:
8181 && (target == 0 || ! safe_from_p (target, exp, 1)
8182 /* Make sure we don't have a hard reg (such as function's return
8183 value) live across basic blocks, if not optimizing. */
8184 || (!optimize && GET_CODE (target) == REG
8185 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8186 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8189 emit_clr_insn (target);
8191 op1 = gen_label_rtx ();
8192 jumpifnot (exp, op1);
8195 emit_0_to_1_insn (target);
8198 return ignore ? const0_rtx : target;
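/* Illustrative sketch: for TRUTH_ANDIF_EXPR `a && b' the code above
   clears the target, jumps past the next store when the short-circuit
   test fails, and otherwise stores 1:

       target = 0;
       if (!(a && b)) goto op1;
       target = 1;
     op1: ;
   */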
8200 case TRUTH_NOT_EXPR:
8201 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8202 /* The parser is careful to generate TRUTH_NOT_EXPR
8203 only with operands that are always zero or one. */
8204 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8205 target, 1, OPTAB_LIB_WIDEN);
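/* Illustration: since the operand is known to be 0 or 1, `!x' is
   computed branch-free above as `x ^ 1' via xor_optab.  */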
8211 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8213 return expand_expr (TREE_OPERAND (exp, 1),
8214 (ignore ? const0_rtx : target),
8218 /* If we would have a "singleton" (see below) were it not for a
8219 conversion in each arm, bring that conversion back out. */
8220 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8221 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8222 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8223 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8225 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8226 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8228 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8229 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8230 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8231 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8232 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8233 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8234 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8235 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8236 return expand_expr (build1 (NOP_EXPR, type,
8237 build (COND_EXPR, TREE_TYPE (iftrue),
8238 TREE_OPERAND (exp, 0),
8240 target, tmode, modifier);
8244 /* Note that COND_EXPRs whose type is a structure or union
8245 are required to be constructed to contain assignments of
8246 a temporary variable, so that we can evaluate them here
8247 for side effect only. If type is void, we must do likewise. */
8249 /* If an arm of the branch requires a cleanup,
8250 only that cleanup is performed. */
8253 tree binary_op = 0, unary_op = 0;
8255 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8256 convert it to our mode, if necessary. */
8257 if (integer_onep (TREE_OPERAND (exp, 1))
8258 && integer_zerop (TREE_OPERAND (exp, 2))
8259 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8268 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8269 if (GET_MODE (op0) == mode)
8273 target = gen_reg_rtx (mode);
8274 convert_move (target, op0, unsignedp);
8278 /* Check for X ? A + B : A. If we have this, we can copy A to the
8279 output and conditionally add B. Similarly for unary operations.
8280 Don't do this if X has side-effects because those side effects
8281 might affect A or B and the "?" operation is a sequence point in
8282 ANSI. (operand_equal_p tests for side effects.) */
8284 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8285 && operand_equal_p (TREE_OPERAND (exp, 2),
8286 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8287 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8288 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8289 && operand_equal_p (TREE_OPERAND (exp, 1),
8290 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8291 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8292 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8293 && operand_equal_p (TREE_OPERAND (exp, 2),
8294 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8295 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8296 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8297 && operand_equal_p (TREE_OPERAND (exp, 1),
8298 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8299 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8301 /* If we are not to produce a result, we have no target. Otherwise,
8302 if a target was specified use it; it will not be used as an
8303 intermediate target unless it is safe. If no target, use a temporary and store in it. */
8308 else if (original_target
8309 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8310 || (singleton && GET_CODE (original_target) == REG
8311 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8312 && original_target == var_rtx (singleton)))
8313 && GET_MODE (original_target) == mode
8314 #ifdef HAVE_conditional_move
8315 && (! can_conditionally_move_p (mode)
8316 || GET_CODE (original_target) == REG
8317 || TREE_ADDRESSABLE (type))
8319 && (GET_CODE (original_target) != MEM
8320 || TREE_ADDRESSABLE (type)))
8321 temp = original_target;
8322 else if (TREE_ADDRESSABLE (type))
8325 temp = assign_temp (type, 0, 0, 1);
8327 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8328 do the test of X as a store-flag operation, do this as
8329 A + ((X != 0) << log C). Similarly for other simple binary
8330 operators. Only do for C == 1 if BRANCH_COST is low. */
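/* A concrete (illustrative) instance: `x ? a + 4 : a', with `x' a
   comparison and 4 a power of 2, becomes, in effect,

       a + ((x != 0) << 2)

   so no conditional branch is emitted.  */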
8331 if (temp && singleton && binary_op
8332 && (TREE_CODE (binary_op) == PLUS_EXPR
8333 || TREE_CODE (binary_op) == MINUS_EXPR
8334 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8335 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8336 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8337 : integer_onep (TREE_OPERAND (binary_op, 1)))
8338 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8341 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8342 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8343 ? addv_optab : add_optab)
8344 : TREE_CODE (binary_op) == MINUS_EXPR
8345 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8346 ? subv_optab : sub_optab)
8347 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8350 /* If we had X ? A : A + 1, do this as A + (X == 0).
8352 We have to invert the truth value here and then put it
8353 back later if do_store_flag fails. We cannot simply copy
8354 TREE_OPERAND (exp, 0) to another variable and modify that
8355 because invert_truthvalue can modify the tree pointed to by its argument. */
8357 if (singleton == TREE_OPERAND (exp, 1))
8358 TREE_OPERAND (exp, 0)
8359 = invert_truthvalue (TREE_OPERAND (exp, 0));
8361 result = do_store_flag (TREE_OPERAND (exp, 0),
8362 (safe_from_p (temp, singleton, 1)
8364 mode, BRANCH_COST <= 1);
8366 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8367 result = expand_shift (LSHIFT_EXPR, mode, result,
8368 build_int_2 (tree_log2
8372 (safe_from_p (temp, singleton, 1)
8373 ? temp : NULL_RTX), 0);
8377 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8378 return expand_binop (mode, boptab, op1, result, temp,
8379 unsignedp, OPTAB_LIB_WIDEN);
8381 else if (singleton == TREE_OPERAND (exp, 1))
8382 TREE_OPERAND (exp, 0)
8383 = invert_truthvalue (TREE_OPERAND (exp, 0));
8386 do_pending_stack_adjust ();
8388 op0 = gen_label_rtx ();
8390 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8394 /* If the target conflicts with the other operand of the
8395 binary op, we can't use it. Also, we can't use the target
8396 if it is a hard register, because evaluating the condition
8397 might clobber it. */
8399 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8400 || (GET_CODE (temp) == REG
8401 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8402 temp = gen_reg_rtx (mode);
8403 store_expr (singleton, temp, 0);
8406 expand_expr (singleton,
8407 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8408 if (singleton == TREE_OPERAND (exp, 1))
8409 jumpif (TREE_OPERAND (exp, 0), op0);
8411 jumpifnot (TREE_OPERAND (exp, 0), op0);
8413 start_cleanup_deferral ();
8414 if (binary_op && temp == 0)
8415 /* Just touch the other operand. */
8416 expand_expr (TREE_OPERAND (binary_op, 1),
8417 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8419 store_expr (build (TREE_CODE (binary_op), type,
8420 make_tree (type, temp),
8421 TREE_OPERAND (binary_op, 1)),
8424 store_expr (build1 (TREE_CODE (unary_op), type,
8425 make_tree (type, temp)),
8429 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8430 comparison operator. If we have one of these cases, set the
8431 output to A, branch on A (cse will merge these two references),
8432 then set the output to FOO. */
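/* Illustration: for `x > 0 ? x : y', the code below first stores `x'
   in the output, branches on `x > 0' (cse merges the two references
   to `x'), and on the fall-through path stores `y'.  */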
8434 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8435 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8436 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8437 TREE_OPERAND (exp, 1), 0)
8438 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8439 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8440 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8442 if (GET_CODE (temp) == REG
8443 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8444 temp = gen_reg_rtx (mode);
8445 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8446 jumpif (TREE_OPERAND (exp, 0), op0);
8448 start_cleanup_deferral ();
8449 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8453 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8454 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8455 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8456 TREE_OPERAND (exp, 2), 0)
8457 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8458 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8459 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8461 if (GET_CODE (temp) == REG
8462 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8463 temp = gen_reg_rtx (mode);
8464 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8465 jumpifnot (TREE_OPERAND (exp, 0), op0);
8467 start_cleanup_deferral ();
8468 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8473 op1 = gen_label_rtx ();
8474 jumpifnot (TREE_OPERAND (exp, 0), op0);
8476 start_cleanup_deferral ();
8478 /* One branch of the cond can be void, if it never returns. For
8479 example A ? throw : E */
8481 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8482 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8484 expand_expr (TREE_OPERAND (exp, 1),
8485 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8486 end_cleanup_deferral ();
8488 emit_jump_insn (gen_jump (op1));
8491 start_cleanup_deferral ();
8493 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8494 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8496 expand_expr (TREE_OPERAND (exp, 2),
8497 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8500 end_cleanup_deferral ();
8511 /* Something needs to be initialized, but we didn't know
8512 where that thing was when building the tree. For example,
8513 it could be the return value of a function, or a parameter
8514 to a function which is passed on the stack, or a temporary
8515 variable which must be passed by reference.
8517 We guarantee that the expression will either be constructed
8518 or copied into our original target. */
8520 tree slot = TREE_OPERAND (exp, 0);
8521 tree cleanups = NULL_TREE;
8524 if (TREE_CODE (slot) != VAR_DECL)
8528 target = original_target;
8530 /* Set this here so that if we get a target that refers to a
8531 register variable that's already been used, put_reg_into_stack
8532 knows that it should fix up those uses. */
8533 TREE_USED (slot) = 1;
8537 if (DECL_RTL_SET_P (slot))
8539 target = DECL_RTL (slot);
8540 /* We have already expanded the slot, so don't do anything else. */
8542 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8547 target = assign_temp (type, 2, 0, 1);
8548 /* All temp slots at this level must not conflict. */
8549 preserve_temp_slots (target);
8550 SET_DECL_RTL (slot, target);
8551 if (TREE_ADDRESSABLE (slot))
8552 put_var_into_stack (slot);
8554 /* Since SLOT is not known to the called function
8555 to belong to its stack frame, we must build an explicit
8556 cleanup. This case occurs when we must build up a reference
8557 to pass the reference as an argument. In this case,
8558 it is very likely that such a reference need not be built here. */
8561 if (TREE_OPERAND (exp, 2) == 0)
8562 TREE_OPERAND (exp, 2)
8563 = (*lang_hooks.maybe_build_cleanup) (slot);
8564 cleanups = TREE_OPERAND (exp, 2);
8569 /* This case does occur, when expanding a parameter which
8570 needs to be constructed on the stack. The target
8571 is the actual stack address that we want to initialize.
8572 The function we call will perform the cleanup in this case. */
8574 /* If we have already assigned it space, use that space,
8575 not the target that we were passed in, as our target
8576 parameter is only a hint. */
8577 if (DECL_RTL_SET_P (slot))
8579 target = DECL_RTL (slot);
8580 /* We have already expanded the slot, so don't do anything else. */
8582 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8587 SET_DECL_RTL (slot, target);
8588 /* If we must have an addressable slot, then make sure that
8589 the RTL that we just stored in slot is OK. */
8590 if (TREE_ADDRESSABLE (slot))
8591 put_var_into_stack (slot);
8595 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8596 /* Mark it as expanded. */
8597 TREE_OPERAND (exp, 1) = NULL_TREE;
8599 store_expr (exp1, target, 0);
8601 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8608 tree lhs = TREE_OPERAND (exp, 0);
8609 tree rhs = TREE_OPERAND (exp, 1);
8611 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8617 /* If lhs is complex, expand calls in rhs before computing it.
8618 That's so we don't compute a pointer and save it over a
8619 call. If lhs is simple, compute it first so we can give it
8620 as a target if the rhs is just a call. This avoids an
8621 extra temp and copy and that prevents a partial-subsumption
8622 which makes bad code. Actually we could treat
8623 component_ref's of vars like vars. */
8625 tree lhs = TREE_OPERAND (exp, 0);
8626 tree rhs = TREE_OPERAND (exp, 1);
8630 /* Check for |= or &= of a bitfield of size one into another bitfield
8631 of size 1. In this case, (unless we need the result of the
8632 assignment) we can do this more efficiently with a
8633 test followed by an assignment, if necessary.
8635 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8636 things change so we do, this code should be enhanced to support it. */
8639 && TREE_CODE (lhs) == COMPONENT_REF
8640 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8641 || TREE_CODE (rhs) == BIT_AND_EXPR)
8642 && TREE_OPERAND (rhs, 0) == lhs
8643 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8644 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8645 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8647 rtx label = gen_label_rtx ();
8649 do_jump (TREE_OPERAND (rhs, 1),
8650 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8651 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8652 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8653 (TREE_CODE (rhs) == BIT_IOR_EXPR
8655 : integer_zero_node)),
8657 do_pending_stack_adjust ();
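/* Illustration of the special case above: with 1-bit fields,
   `a.x |= b.y' is emitted, in effect, as

       if (b.y) a.x = 1;

   and `a.x &= b.y' as `if (!b.y) a.x = 0;', so the store is skipped
   whenever it could not change anything.  */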
8662 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8668 if (!TREE_OPERAND (exp, 0))
8669 expand_null_return ();
8671 expand_return (TREE_OPERAND (exp, 0));
8674 case PREINCREMENT_EXPR:
8675 case PREDECREMENT_EXPR:
8676 return expand_increment (exp, 0, ignore);
8678 case POSTINCREMENT_EXPR:
8679 case POSTDECREMENT_EXPR:
8680 /* Faster to treat as pre-increment if result is not used. */
8681 return expand_increment (exp, ! ignore, ignore);
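/* Illustration: in a statement like `i++;' the result is unused, so
   the expansion above is exactly that of `++i', avoiding a copy of
   the old value.  */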
8684 /* Are we taking the address of a nested function? */
8685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8686 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8687 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8688 && ! TREE_STATIC (exp))
8690 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8691 op0 = force_operand (op0, target);
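/* Illustration (GNU C nested functions; `call_through' is made up for
   the example):

       int outer (int x)
       {
         int inner (void) { return x; }
         return call_through (&inner);
       }

   Here the address of `inner' is a trampoline address, so the static
   chain is set up whenever the pointer is called.  */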
8693 /* If we are taking the address of something erroneous, just use zero. */
8695 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8697 /* If we are taking the address of a constant and are at the
8698 top level, we have to use output_constant_def since we can't
8699 call force_const_mem at top level. */
8701 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8702 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8704 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8707 /* We make sure to pass const0_rtx down if we came in with
8708 ignore set, to avoid doing the cleanups twice for something. */
8709 op0 = expand_expr (TREE_OPERAND (exp, 0),
8710 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8711 (modifier == EXPAND_INITIALIZER
8712 ? modifier : EXPAND_CONST_ADDRESS));
8714 /* If we are going to ignore the result, OP0 will have been set
8715 to const0_rtx, so just return it. Don't get confused and
8716 think we are taking the address of the constant. */
8720 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8721 clever and return a REG when given a MEM. */
8722 op0 = protect_from_queue (op0, 1);
8724 /* We would like the object in memory. If it is a constant, we can
8725 have it be statically allocated into memory. For a non-constant,
8726 we need to allocate some memory and store the value into it. */
8728 if (CONSTANT_P (op0))
8729 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8731 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8732 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8733 || GET_CODE (op0) == PARALLEL)
8735 /* If the operand is a SAVE_EXPR, we can deal with this by
8736 forcing the SAVE_EXPR into memory. */
8737 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8739 put_var_into_stack (TREE_OPERAND (exp, 0));
8740 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8744 /* If this object is in a register, it can't be BLKmode. */
8745 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8746 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8748 if (GET_CODE (op0) == PARALLEL)
8749 /* Handle calls that pass values in multiple
8750 non-contiguous locations. The Irix 6 ABI has examples of this. */
8752 emit_group_store (memloc, op0,
8753 int_size_in_bytes (inner_type));
8755 emit_move_insn (memloc, op0);
8761 if (GET_CODE (op0) != MEM)
8764 mark_temp_addr_taken (op0);
8765 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8767 op0 = XEXP (op0, 0);
8768 #ifdef POINTERS_EXTEND_UNSIGNED
8769 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8770 && mode == ptr_mode)
8771 op0 = convert_memory_address (ptr_mode, op0);
8776 /* If OP0 is not aligned at least as much as the type requires, we
8777 need to make a temporary, copy OP0 to it, and take the address of
8778 the temporary. We want to use the alignment of the type, not of
8779 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8780 the test for BLKmode means that can't happen. The test for
8781 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8784 We don't need to do this at all if the machine doesn't have
8785 strict alignment. */
8786 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8787 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8789 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8791 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8793 = assign_stack_temp_for_type
8794 (TYPE_MODE (inner_type),
8795 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8796 : int_size_in_bytes (inner_type),
8797 1, build_qualified_type (inner_type,
8798 (TYPE_QUALS (inner_type)
8799 | TYPE_QUAL_CONST)));
8801 if (TYPE_ALIGN_OK (inner_type))
8804 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8808 op0 = force_operand (XEXP (op0, 0), target);
8812 && GET_CODE (op0) != REG
8813 && modifier != EXPAND_CONST_ADDRESS
8814 && modifier != EXPAND_INITIALIZER
8815 && modifier != EXPAND_SUM)
8816 op0 = force_reg (Pmode, op0);
8818 if (GET_CODE (op0) == REG
8819 && ! REG_USERVAR_P (op0))
8820 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8822 #ifdef POINTERS_EXTEND_UNSIGNED
8823 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8824 && mode == ptr_mode)
8825 op0 = convert_memory_address (ptr_mode, op0);
8830 case ENTRY_VALUE_EXPR:
8833 /* COMPLEX type for Extended Pascal & Fortran */
8836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8839 /* Get the rtx code of the operands. */
8840 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8841 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8844 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8848 /* Move the real (op0) and imaginary (op1) parts to their location. */
8849 emit_move_insn (gen_realpart (mode, target), op0);
8850 emit_move_insn (gen_imagpart (mode, target), op1);
8852 insns = get_insns ();
8855 /* Complex construction should appear as a single unit. */
8856 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8857 each with a separate pseudo as destination.
8858 It's not correct for flow to treat them as a unit. */
8859 if (GET_CODE (target) != CONCAT)
8860 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8868 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8869 return gen_realpart (mode, op0);
8872 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8873 return gen_imagpart (mode, op0);
8877 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8881 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8884 target = gen_reg_rtx (mode);
8888 /* Store the realpart and the negated imagpart to target. */
8889 emit_move_insn (gen_realpart (partmode, target),
8890 gen_realpart (partmode, op0));
8892 imag_t = gen_imagpart (partmode, target);
8893 temp = expand_unop (partmode,
8894 ! unsignedp && flag_trapv
8895 && (GET_MODE_CLASS(partmode) == MODE_INT)
8896 ? negv_optab : neg_optab,
8897 gen_imagpart (partmode, op0), imag_t, 0);
8899 emit_move_insn (imag_t, temp);
8901 insns = get_insns ();
8904 /* Conjugate should appear as a single unit.
8905 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8906 each with a separate pseudo as destination.
8907 It's not correct for flow to treat them as a unit. */
8908 if (GET_CODE (target) != CONCAT)
8909 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
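/* Illustration: for CONJ_EXPR on a complex value z = a + b*i, the code
   above copies `a' into the real part of the target and stores `-b'
   into the imaginary part, using the trapping negv_optab only for
   signed integral parts under flag_trapv.  */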
8916 case TRY_CATCH_EXPR:
8918 tree handler = TREE_OPERAND (exp, 1);
8920 expand_eh_region_start ();
8922 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8924 expand_eh_region_end_cleanup (handler);
8929 case TRY_FINALLY_EXPR:
8931 tree try_block = TREE_OPERAND (exp, 0);
8932 tree finally_block = TREE_OPERAND (exp, 1);
8933 rtx finally_label = gen_label_rtx ();
8934 rtx done_label = gen_label_rtx ();
8935 rtx return_link = gen_reg_rtx (Pmode);
8936 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8937 (tree) finally_label, (tree) return_link);
8938 TREE_SIDE_EFFECTS (cleanup) = 1;
8940 /* Start a new binding layer that will keep track of all cleanup
8941 actions to be performed. */
8942 expand_start_bindings (2);
8944 target_temp_slot_level = temp_slot_level;
8946 expand_decl_cleanup (NULL_TREE, cleanup);
8947 op0 = expand_expr (try_block, target, tmode, modifier);
8949 preserve_temp_slots (op0);
8950 expand_end_bindings (NULL_TREE, 0, 0);
8951 emit_jump (done_label);
8952 emit_label (finally_label);
8953 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8954 emit_indirect_jump (return_link);
8955 emit_label (done_label);
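/* Illustrative sketch (GNU computed-goto notation, used loosely): the
   TRY_FINALLY_EXPR expansion above has the shape

       body;
       return_link = &&done; goto finally;
     finally:
       cleanup;
       goto *return_link;
     done: ;

   so any other exit path can run the same cleanup code by first
   loading its own return address into return_link.  */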
8959 case GOTO_SUBROUTINE_EXPR:
8961 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8962 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8963 rtx return_address = gen_label_rtx ();
8964 emit_move_insn (return_link,
8965 gen_rtx_LABEL_REF (Pmode, return_address));
8967 emit_label (return_address);
8972 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8975 return get_exception_pointer (cfun);
8978 /* Function descriptors are not valid except as
8979 initialization constants, and should not be expanded. */
8983 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8986 /* Here to do an ordinary binary operator, generating an instruction
8987 from the optab already placed in `this_optab'. */
8989 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8991 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8992 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8994 temp = expand_binop (mode, this_optab, op0, op1, target,
8995 unsignedp, OPTAB_LIB_WIDEN);
9001 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9002 when applied to the address of EXP produces an address known to be
9003 aligned more than BIGGEST_ALIGNMENT. */
9006 is_aligning_offset (offset, exp)
9010 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9011 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9012 || TREE_CODE (offset) == NOP_EXPR
9013 || TREE_CODE (offset) == CONVERT_EXPR
9014 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9015 offset = TREE_OPERAND (offset, 0);
9017 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9018 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9019 if (TREE_CODE (offset) != BIT_AND_EXPR
9020 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9021 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9022 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9025 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9026 It must be NEGATE_EXPR. Then strip any more conversions. */
9027 offset = TREE_OPERAND (offset, 0);
9028 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9029 || TREE_CODE (offset) == NOP_EXPR
9030 || TREE_CODE (offset) == CONVERT_EXPR)
9031 offset = TREE_OPERAND (offset, 0);
9033 if (TREE_CODE (offset) != NEGATE_EXPR)
9036 offset = TREE_OPERAND (offset, 0);
9037 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9038 || TREE_CODE (offset) == NOP_EXPR
9039 || TREE_CODE (offset) == CONVERT_EXPR)
9040 offset = TREE_OPERAND (offset, 0);
9042 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9043 whose type is the same as EXP. */
9044 return (TREE_CODE (offset) == ADDR_EXPR
9045 && (TREE_OPERAND (offset, 0) == exp
9046 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9047 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9048 == TREE_TYPE (exp)))));
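/* Illustration of the shape matched above, for a hypothetical source
   expression

       offset = (- (long) &exp) & (ALIGN - 1);

   where ALIGN is a power of 2 and the mask ALIGN - 1 is larger than
   BIGGEST_ALIGNMENT: adding such an offset to the address of EXP
   rounds it up to a multiple of ALIGN.  */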
9051 /* Return the tree node if an ARG corresponds to a string constant or zero
9052 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9053 in bytes within the string that ARG is accessing. The type of the
9054 offset will be `sizetype'. */
9057 string_constant (arg, ptr_offset)
9063 if (TREE_CODE (arg) == ADDR_EXPR
9064 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9066 *ptr_offset = size_zero_node;
9067 return TREE_OPERAND (arg, 0);
9069 else if (TREE_CODE (arg) == PLUS_EXPR)
9071 tree arg0 = TREE_OPERAND (arg, 0);
9072 tree arg1 = TREE_OPERAND (arg, 1);
9077 if (TREE_CODE (arg0) == ADDR_EXPR
9078 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9080 *ptr_offset = convert (sizetype, arg1);
9081 return TREE_OPERAND (arg0, 0);
9083 else if (TREE_CODE (arg1) == ADDR_EXPR
9084 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9086 *ptr_offset = convert (sizetype, arg0);
9087 return TREE_OPERAND (arg1, 0);
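/* Illustration: for an argument tree representing `"hello" + 2' (an
   ADDR_EXPR of a STRING_CST plus a constant), the STRING_CST for
   "hello" is returned and *PTR_OFFSET is set to a sizetype 2.  */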
9094 /* Expand code for a post- or pre- increment or decrement
9095 and return the RTX for the result.
9096 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9099 expand_increment (exp, post, ignore)
9105 tree incremented = TREE_OPERAND (exp, 0);
9106 optab this_optab = add_optab;
9108 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9109 int op0_is_copy = 0;
9110 int single_insn = 0;
9111 /* 1 means we can't store into OP0 directly,
9112 because it is a subreg narrower than a word,
9113 and we don't dare clobber the rest of the word. */
9116 /* Stabilize any component ref that might need to be
9117 evaluated more than once below. */
9119 || TREE_CODE (incremented) == BIT_FIELD_REF
9120 || (TREE_CODE (incremented) == COMPONENT_REF
9121 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9122 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9123 incremented = stabilize_reference (incremented);
9124 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9125 ones into save exprs so that they don't accidentally get evaluated
9126 more than once by the code below. */
9127 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9128 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9129 incremented = save_expr (incremented);
9131 /* Compute the operands as RTX.
9132 Note whether OP0 is the actual lvalue or a copy of it:
9133 I believe it is a copy iff it is a register or subreg
9134 and insns were generated in computing it. */
9136 temp = get_last_insn ();
9137 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9139 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9140 in place but instead must do sign- or zero-extension during assignment,
9141 so we copy it into a new register and let the code below use it as
9144 Note that we can safely modify this SUBREG since it is known not to be
9145 shared (it was made by the expand_expr call above). */
9147 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9150 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9154 else if (GET_CODE (op0) == SUBREG
9155 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9157 /* We cannot increment this SUBREG in place. If we are
9158 post-incrementing, get a copy of the old value. Otherwise,
9159 just mark that we cannot increment in place. */
9161 op0 = copy_to_reg (op0);
9166 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9167 && temp != get_last_insn ());
9168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9170 /* Decide whether incrementing or decrementing. */
9171 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9172 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9173 this_optab = sub_optab;
9175 /* Convert decrement by a constant into a negative increment. */
9176 if (this_optab == sub_optab
9177 && GET_CODE (op1) == CONST_INT)
9179 op1 = GEN_INT (-INTVAL (op1));
9180 this_optab = add_optab;
9183 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9184 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9186 /* For a preincrement, see if we can do this with a single instruction. */
9189 icode = (int) this_optab->handlers[(int) mode].insn_code;
9190 if (icode != (int) CODE_FOR_nothing
9191 /* Make sure that OP0 is valid for operands 0 and 1
9192 of the insn we want to queue. */
9193 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9194 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9195 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9199 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9200 then we cannot just increment OP0. We must therefore contrive to
9201 increment the original value. Then, for postincrement, we can return
9202 OP0 since it is a copy of the old value. For preincrement, expand here
9203 unless we can do it with a single insn.
9205 Likewise if storing directly into OP0 would clobber high bits
9206 we need to preserve (bad_subreg). */
9207 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9209 /* This is the easiest way to increment the value wherever it is.
9210 Problems with multiple evaluation of INCREMENTED are prevented
9211 because either (1) it is a component_ref or preincrement,
9212 in which case it was stabilized above, or (2) it is an array_ref
9213 with constant index in an array in a register, which is
9214 safe to reevaluate. */
9215 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9216 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9217 ? MINUS_EXPR : PLUS_EXPR),
9220 TREE_OPERAND (exp, 1));
9222 while (TREE_CODE (incremented) == NOP_EXPR
9223 || TREE_CODE (incremented) == CONVERT_EXPR)
9225 newexp = convert (TREE_TYPE (incremented), newexp);
9226 incremented = TREE_OPERAND (incremented, 0);
9229 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9230 return post ? op0 : temp;
9235 /* We have a true reference to the value in OP0.
9236 If there is an insn to add or subtract in this mode, queue it.
9237 Queueing the increment insn avoids the register shuffling
9238 that often results if we must increment now and first save
9239 the old value for subsequent use. */
9241 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9242 op0 = stabilize (op0);
9245 icode = (int) this_optab->handlers[(int) mode].insn_code;
9246 if (icode != (int) CODE_FOR_nothing
9247 /* Make sure that OP0 is valid for operands 0 and 1
9248 of the insn we want to queue. */
9249 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9250 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9252 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9253 op1 = force_reg (mode, op1);
9255 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9257 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9259 rtx addr = (general_operand (XEXP (op0, 0), mode)
9260 ? force_reg (Pmode, XEXP (op0, 0))
9261 : copy_to_reg (XEXP (op0, 0)));
9264 op0 = replace_equiv_address (op0, addr);
9265 temp = force_reg (GET_MODE (op0), op0);
9266 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9267 op1 = force_reg (mode, op1);
9269 /* The increment queue is LIFO, thus we have to `queue'
9270 the instructions in reverse order. */
9271 enqueue_insn (op0, gen_move_insn (op0, temp));
9272 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9277 /* Preincrement, or we can't increment with one simple insn. */
9279 /* Save a copy of the value before inc or dec, to return it later. */
9280 temp = value = copy_to_reg (op0);
9282 /* Arrange to return the incremented value. */
9283 /* Copy the rtx because expand_binop will protect from the queue,
9284 and the results of that would be invalid for us to return
9285 if our caller does emit_queue before using our result. */
9286 temp = copy_rtx (value = op0);
9288 /* Increment however we can. */
9289 op1 = expand_binop (mode, this_optab, value, op1, op0,
9290 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9292 /* Make sure the value is stored into OP0. */
9294 emit_move_insn (op0, op1);
9299 /* At the start of a function, record that we have no previously-pushed
9300 arguments waiting to be popped. */
9303 init_pending_stack_adjust ()
9305 pending_stack_adjust = 0;
9308 /* When exiting from a function, if safe, clear out any pending stack adjust
9309 so the adjustment won't get done.
9311 Note, if the current function calls alloca, then it must have a
9312 frame pointer regardless of the value of flag_omit_frame_pointer. */
9315 clear_pending_stack_adjust ()
9317 #ifdef EXIT_IGNORE_STACK
9319 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9320 && EXIT_IGNORE_STACK
9321 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9322 && ! flag_inline_functions)
9324 stack_pointer_delta -= pending_stack_adjust,
9325 pending_stack_adjust = 0;
9330 /* Pop any previously-pushed arguments that have not been popped yet. */
9333 do_pending_stack_adjust ()
9335 if (inhibit_defer_pop == 0)
9337 if (pending_stack_adjust != 0)
9338 adjust_stack (GEN_INT (pending_stack_adjust));
9339 pending_stack_adjust = 0;
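/* Illustration: if three deferred argument pops of 8 bytes each have
   accumulated (a made-up figure), pending_stack_adjust is 24 and the
   single adjust_stack call above pops all of them at once.  */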
9343 /* Expand conditional expressions. */
9345 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9346 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9350 jumpifnot (exp, label)
9354 do_jump (exp, label, NULL_RTX);
9357 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9364 do_jump (exp, NULL_RTX, label);
9367 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9368 the result is zero, or IF_TRUE_LABEL if the result is one.
9369 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9370 meaning fall through in that case.
9372 do_jump always does any pending stack adjust except when it does not
9373 actually perform a jump. An example where there is no jump
9374 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9376 This function is responsible for optimizing cases such as
9377 &&, || and comparison operators in EXP. */
9380 do_jump (exp, if_false_label, if_true_label)
9382 rtx if_false_label, if_true_label;
9384 enum tree_code code = TREE_CODE (exp);
9385 /* Some cases need to create a label to jump to
9386 in order to properly fall through.
9387 These cases set DROP_THROUGH_LABEL nonzero. */
9388 rtx drop_through_label = 0;
9392 enum machine_mode mode;
9394 #ifdef MAX_INTEGER_COMPUTATION_MODE
9395 check_max_integer_computation_mode (exp);
9406 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9412 /* This is not true with #pragma weak */
9414 /* The address of something can never be zero. */
9416 emit_jump (if_true_label);
9421 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9422 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9423 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9424 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9427 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9429 if ((TYPE_PRECISION (TREE_TYPE (exp))
9430 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9432 case NON_LVALUE_EXPR:
9433 case REFERENCE_EXPR:
9438 /* These cannot change zero->non-zero or vice versa. */
9439 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9442 case WITH_RECORD_EXPR:
9443 /* Put the object on the placeholder list, recurse through our first
9444 operand, and pop the list. */
9445 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9447 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9448 placeholder_list = TREE_CHAIN (placeholder_list);
9452 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9453 a test and can be longer if the test is eliminated. */
9455 /* Reduce to minus. */
9456 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9457 TREE_OPERAND (exp, 0),
9458 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9459 TREE_OPERAND (exp, 1))));
9460 /* Process as MINUS. */
9464 /* Non-zero iff operands of minus differ. */
9465 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9466 TREE_OPERAND (exp, 0),
9467 TREE_OPERAND (exp, 1)),
9468 NE, NE, if_false_label, if_true_label);
9472 /* If we are AND'ing with a small constant, do this comparison in the
9473 smallest type that fits. If the machine doesn't have comparisons
9474 that small, it will be converted back to the wider comparison.
9475 This helps if we are testing the sign bit of a narrower object.
9476 combine can't do this for us because it can't know whether a
9477 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9479 if (! SLOW_BYTE_ACCESS
9480 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9481 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9482 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9483 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9484 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9485 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9486 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9487 != CODE_FOR_nothing))
9489 do_jump (convert (type, exp), if_false_label, if_true_label);
9494 case TRUTH_NOT_EXPR:
9495 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9498 case TRUTH_ANDIF_EXPR:
9499 if (if_false_label == 0)
9500 if_false_label = drop_through_label = gen_label_rtx ();
9501 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9502 start_cleanup_deferral ();
9503 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9504 end_cleanup_deferral ();
9507 case TRUTH_ORIF_EXPR:
9508 if (if_true_label == 0)
9509 if_true_label = drop_through_label = gen_label_rtx ();
9510 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9511 start_cleanup_deferral ();
9512 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9513 end_cleanup_deferral ();
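/* Illustration of the short-circuiting above: for `if (a && b)',
   TRUTH_ANDIF_EXPR jumps to the false label as soon as `a' is zero
   and tests `b' only otherwise; TRUTH_ORIF_EXPR mirrors this, jumping
   to the true label as soon as `a' is nonzero.  */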
9518 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9519 preserve_temp_slots (NULL_RTX);
9523 do_pending_stack_adjust ();
9524 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9530 case ARRAY_RANGE_REF:
9532 HOST_WIDE_INT bitsize, bitpos;
9534 enum machine_mode mode;
9539 /* Get description of this reference. We don't actually care
9540 about the underlying object here. */
9541 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9542 &unsignedp, &volatilep);
9544 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9545 if (! SLOW_BYTE_ACCESS
9546 && type != 0 && bitsize >= 0
9547 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9548 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9549 != CODE_FOR_nothing))
9551 do_jump (convert (type, exp), if_false_label, if_true_label);
9558 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9559 if (integer_onep (TREE_OPERAND (exp, 1))
9560 && integer_zerop (TREE_OPERAND (exp, 2)))
9561 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9563 else if (integer_zerop (TREE_OPERAND (exp, 1))
9564 && integer_onep (TREE_OPERAND (exp, 2)))
9565 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9569 rtx label1 = gen_label_rtx ();
9570 drop_through_label = gen_label_rtx ();
9572 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9574 start_cleanup_deferral ();
9575 /* Now the THEN-expression. */
9576 do_jump (TREE_OPERAND (exp, 1),
9577 if_false_label ? if_false_label : drop_through_label,
9578 if_true_label ? if_true_label : drop_through_label);
9579 /* In case the do_jump just above never jumps. */
9580 do_pending_stack_adjust ();
9581 emit_label (label1);
9583 /* Now the ELSE-expression. */
9584 do_jump (TREE_OPERAND (exp, 2),
9585 if_false_label ? if_false_label : drop_through_label,
9586 if_true_label ? if_true_label : drop_through_label);
9587 end_cleanup_deferral ();
9593 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9595 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9596 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9598 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9599 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9602 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9603 fold (build (EQ_EXPR, TREE_TYPE (exp),
9604 fold (build1 (REALPART_EXPR,
9605 TREE_TYPE (inner_type),
9607 fold (build1 (REALPART_EXPR,
9608 TREE_TYPE (inner_type),
9610 fold (build (EQ_EXPR, TREE_TYPE (exp),
9611 fold (build1 (IMAGPART_EXPR,
9612 TREE_TYPE (inner_type),
9614 fold (build1 (IMAGPART_EXPR,
9615 TREE_TYPE (inner_type),
9617 if_false_label, if_true_label);
9620 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9621 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9623 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9624 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9625 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9627 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9633 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9635 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9636 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9638 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9639 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9642 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9643 fold (build (NE_EXPR, TREE_TYPE (exp),
9644 fold (build1 (REALPART_EXPR,
9645 TREE_TYPE (inner_type),
9647 fold (build1 (REALPART_EXPR,
9648 TREE_TYPE (inner_type),
9650 fold (build (NE_EXPR, TREE_TYPE (exp),
9651 fold (build1 (IMAGPART_EXPR,
9652 TREE_TYPE (inner_type),
9654 fold (build1 (IMAGPART_EXPR,
9655 TREE_TYPE (inner_type),
9657 if_false_label, if_true_label);
9660 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9661 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9663 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9664 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9665 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9667 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9673 if (GET_MODE_CLASS (mode) == MODE_INT
9674 && ! can_compare_p (LT, mode, ccp_jump))
9675 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9677 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 if (GET_MODE_CLASS (mode) == MODE_INT
9683 && ! can_compare_p (LE, mode, ccp_jump))
9684 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9686 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9690 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9691 if (GET_MODE_CLASS (mode) == MODE_INT
9692 && ! can_compare_p (GT, mode, ccp_jump))
9693 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9695 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9699 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9700 if (GET_MODE_CLASS (mode) == MODE_INT
9701 && ! can_compare_p (GE, mode, ccp_jump))
9702 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9704 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9707 case UNORDERED_EXPR:
9710 enum rtx_code cmp, rcmp;
9713 if (code == UNORDERED_EXPR)
9714 cmp = UNORDERED, rcmp = ORDERED;
9716 cmp = ORDERED, rcmp = UNORDERED;
9717 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9720 if (! can_compare_p (cmp, mode, ccp_jump)
9721 && (can_compare_p (rcmp, mode, ccp_jump)
9722 /* If the target doesn't provide either UNORDERED or ORDERED
9723 comparisons, canonicalize on UNORDERED for the library. */
9724 || rcmp == UNORDERED))
9728 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9730 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9735 enum rtx_code rcode1;
9736 enum tree_code tcode2;
9760 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9761 if (can_compare_p (rcode1, mode, ccp_jump))
9762 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9766 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9767 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9770 /* If the target doesn't support combined unordered
9771 compares, decompose into UNORDERED + comparison. */
9772 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9773 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9774 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9775 do_jump (exp, if_false_label, if_true_label);
9781 /* Handle __builtin_expect (<test>, 0) and
9782 __builtin_expect (<test>, 1) specially.
9784 We need to do this here, so that <test> is not converted to a SCC
9785 operation on machines that use condition code registers and COMPARE
9786 like the PowerPC, and then the jump is done based on whether the SCC
9787 operation produced a 1 or 0. */
9789 /* Check for a built-in function. */
9790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9792 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9793 tree arglist = TREE_OPERAND (exp, 1);
9795 if (TREE_CODE (fndecl) == FUNCTION_DECL
9796 && DECL_BUILT_IN (fndecl)
9797 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9798 && arglist != NULL_TREE
9799 && TREE_CHAIN (arglist) != NULL_TREE)
9801 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9804 if (seq != NULL_RTX)
9811 /* fall through and generate the normal code. */
9815 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9817 /* This is not needed any more and causes poor code since it causes
9818 comparisons and tests from non-SI objects to have different code sequences. */
9820 /* Copy to register to avoid generating bad insns by cse
9821 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9822 if (!cse_not_expected && GET_CODE (temp) == MEM)
9823 temp = copy_to_reg (temp);
9825 do_pending_stack_adjust ();
9826 /* Do any postincrements in the expression that was tested. */
9829 if (GET_CODE (temp) == CONST_INT
9830 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9831 || GET_CODE (temp) == LABEL_REF)
9833 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9837 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9838 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9839 /* Note swapping the labels gives us not-equal. */
9840 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9841 else if (GET_MODE (temp) != VOIDmode)
9842 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9843 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9844 GET_MODE (temp), NULL_RTX,
9845 if_false_label, if_true_label);
9850 if (drop_through_label)
9852 /* If do_jump produces code that might be jumped around,
9853 do any stack adjusts from that code, before the place
9854 where control merges in. */
9855 do_pending_stack_adjust ();
9856 emit_label (drop_through_label);
9860 /* Given a comparison expression EXP for values too wide to be compared
9861 with one insn, test the comparison and jump to the appropriate label.
9862 The code of EXP is ignored; we always test GT if SWAP is 0,
9863 and LT if SWAP is 1. */
9866 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9869 rtx if_false_label, if_true_label;
9871 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9872 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9873 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9874 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9876 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9879 /* Compare OP0 with OP1, word at a time, in mode MODE.
9880 UNSIGNEDP says to do unsigned comparison.
9881 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9884 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9885 enum machine_mode mode;
9888 rtx if_false_label, if_true_label;
9890 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9891 rtx drop_through_label = 0;
9894 if (! if_true_label || ! if_false_label)
9895 drop_through_label = gen_label_rtx ();
9896 if (! if_true_label)
9897 if_true_label = drop_through_label;
9898 if (! if_false_label)
9899 if_false_label = drop_through_label;
9901 /* Compare a word at a time, high order first. */
9902 for (i = 0; i < nwords; i++)
9904 rtx op0_word, op1_word;
9906 if (WORDS_BIG_ENDIAN)
9908 op0_word = operand_subword_force (op0, i, mode);
9909 op1_word = operand_subword_force (op1, i, mode);
9913 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9914 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9917 /* All but the high-order word must be compared as unsigned. */
9918 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9919 (unsignedp || i > 0), word_mode, NULL_RTX,
9920 NULL_RTX, if_true_label);
9922 /* Consider lower words only if these are equal. */
9923 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9924 NULL_RTX, NULL_RTX, if_false_label);
9928 emit_jump (if_false_label);
9929 if (drop_through_label)
9930 emit_label (drop_through_label);
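/* Illustrative sketch: comparing two DImode values on a 32-bit target
   emits, in effect,

       if (hi0 > hi1) goto if_true_label;
       if (hi0 != hi1) goto if_false_label;
       if ((unsigned) lo0 > (unsigned) lo1) goto if_true_label;
       goto if_false_label;

   where only the high-order comparison uses the signedness of the
   original operands.  */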
9933 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9934 with one insn, test the comparison and jump to the appropriate label. */
9937 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9939 rtx if_false_label, if_true_label;
9941 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9942 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9943 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9944 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9946 rtx drop_through_label = 0;
9948 if (! if_false_label)
9949 drop_through_label = if_false_label = gen_label_rtx ();
9951 for (i = 0; i < nwords; i++)
9952 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9953 operand_subword_force (op1, i, mode),
9954 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9955 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9958 emit_jump (if_true_label);
9959 if (drop_through_label)
9960 emit_label (drop_through_label);
9963 /* Jump according to whether OP0 is 0.
9964 We assume that OP0 has an integer mode that is too wide
9965 for the available compare insns. */
9968 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9970 rtx if_false_label, if_true_label;
9972 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9975 rtx drop_through_label = 0;
9977 /* The fastest way of doing this comparison on almost any machine is to
9978 "or" all the words and compare the result. If all have to be loaded
9979 from memory and this is a very wide item, it's possible this may
9980 be slower, but that's highly unlikely. */
9982 part = gen_reg_rtx (word_mode);
9983 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9984 for (i = 1; i < nwords && part != 0; i++)
9985 part = expand_binop (word_mode, ior_optab, part,
9986 operand_subword_force (op0, i, GET_MODE (op0)),
9987 part, 1, OPTAB_WIDEN);
9991 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9992 NULL_RTX, if_false_label, if_true_label);
9997 /* If we couldn't do the "or" simply, do this with a series of compares. */
9998 if (! if_false_label)
9999 drop_through_label = if_false_label = gen_label_rtx ();
10001 for (i = 0; i < nwords; i++)
10002 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10003 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10004 if_false_label, NULL_RTX);
10007 emit_jump (if_true_label);
10009 if (drop_through_label)
10010 emit_label (drop_through_label);
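/* Illustration: to test a DImode value against zero on a 32-bit
   target, the fast path above computes, in effect,

       part = lo | hi;
       if (part == 0) goto if_true_label; else goto if_false_label;

   one IOR and a single word-mode compare instead of two compares.  */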
10013 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10014 (including code to compute the values to be compared)
10015 and set (CC0) according to the result.
10016 The decision as to signed or unsigned comparison must be made by the caller.
10018 We force a stack adjustment unless there are currently
10019 things pushed on the stack that aren't yet used.
10021 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
10025 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10027 enum rtx_code code;
10029 enum machine_mode mode;
10034 /* If one operand is constant, make it the second one. Only do this
10035 if the other operand is not constant as well. */
10037 if (swap_commutative_operands_p (op0, op1))
10042 code = swap_condition (code);
10045 if (flag_force_mem)
10047 op0 = force_not_mem (op0);
10048 op1 = force_not_mem (op1);
10051 do_pending_stack_adjust ();
10053 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10054 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10058 /* There's no need to do this now that combine.c can eliminate lots of
10059 sign extensions. This can be less efficient in certain cases on other machines. */
10062 /* If this is a signed equality comparison, we can do it as an
10063 unsigned comparison since zero-extension is cheaper than sign
10064 extension and comparisons with zero are done as unsigned. This is
10065 the case even on machines that can do fast sign extension, since
10066 zero-extension is easier to combine with other operations than
10067 sign-extension is. If we are comparing against a constant, we must
10068 convert it to what it would look like unsigned. */
10069 if ((code == EQ || code == NE) && ! unsignedp
10070 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10072 if (GET_CODE (op1) == CONST_INT
10073 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10074 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10079 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10082 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10084 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
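
/* Illustrative sketch (not part of the original source): what "convert
   it to what it would look like unsigned" means in the disabled block
   above.  For a QImode comparison against -1, the constant is rewritten
   as its 8-bit unsigned image before the unsigned equality test.  */
#if 0
static int
signed_eq_as_unsigned (signed char x)
{
  /* (x == -1) done unsigned: mask the constant to the mode's width
     (GET_MODE_MASK of QImode is 0xff), then compare unsigned.  */
  return (unsigned char) x == (-1 & 0xff);	/* i.e. == 0xff */
}
#endif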
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
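
/* Illustrative sketch (not part of the original source): the reversal
   done at the top of do_compare_rtx_and_jump.  "Jump to the false label
   when (a < b) fails" becomes "jump when (a >= b) holds", which needs
   only one branch.  The reversal is skipped for floating modes because
   NaNs make !(a < b) differ from (a >= b).  */
#if 0
static int
only_false_label_given (int a, int b)
{
  /* Wanted: fall through when a < b, branch when not.  With the rtx
     code reversed from LT to GE, one conditional jump suffices.  */
  if (a >= b)
    return 0;		/* the old if_false_label target */
  return 1;		/* drop through: the "true" path */
}
#endif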
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.

   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
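
/* Illustrative sketch (not part of the original source): why the caller
   must supply both SIGNED_CODE and UNSIGNED_CODE.  The same source
   operator compiles to different rtx codes depending on the signedness
   of the operands' type; the names below are invented.  */
#if 0
static int
pick_code (int sa, int sb, unsigned int ua, unsigned int ub)
{
  int signed_lt = sa < sb;	/* TREE_UNSIGNED == 0: rtx code LT  */
  int unsigned_lt = ua < ub;	/* TREE_UNSIGNED == 1: rtx code LTU */
  return signed_lt + unsigned_lt;
}
#endif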
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);
  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
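
  /* Illustrative sketch (not part of the original source): the source-level
     effect of the single-bit transformation above, for a hypothetical test
     of bit 3.  No scc instruction is needed.  */
#if 0
  {
    unsigned int x = 42;
    int ne3 = (x >> 3) & 1;		/* (x & 8) != 0 */
    int eq3 = ((x >> 3) & 1) ^ 1;	/* (x & 8) == 0: XOR the low bit */
  }
#endif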
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
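
/* Illustrative sketch (not part of the original source): the set/jump/set
   sequence do_store_flag falls back on when no scc instruction is
   available, written as the C it corresponds to.  */
#if 0
static int
store_flag_fallback (int a, int b)
{
  int target = 1;	/* emit_move_insn (target, const1_rtx) */
  if (a < b)		/* bcc_gen_fctn: branch if the condition holds */
    goto label;
  target = 0;		/* emit_move_insn (target, const0_rtx) */
 label:
  return target;
}
#endif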
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
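
/* Illustrative sketch (not part of the original source): the wide-index
   path above, assuming a hypothetical target where long is wider than
   the SImode index that casesi accepts.  Subtract the low bound in the
   wide mode, bounds-check there, and only then truncate.  */
#if 0
static unsigned int
narrow_switch_index (unsigned long idx, unsigned long minval,
		     unsigned long range, int *took_default)
{
  idx -= minval;		 /* MINUS_EXPR done in the original mode */
  *took_default = range < idx;	 /* emit_cmp_and_jump_insns (..., LTU, ...) */
  return (unsigned int) idx;	 /* truncation is safe once in range */
}
#endif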
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
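
/* Illustrative sketch (not part of the original source): the single
   unsigned comparison above.  After the low bound is subtracted, one
   GTU-style test checks both ends of the range at once, because a value
   below the low bound wraps around to a huge unsigned number.  */
#if 0
static int
in_case_range (unsigned long i, unsigned long lo, unsigned long hi)
{
  /* Equivalent to (i >= lo && i <= hi), with a single comparison.  */
  return i - lo <= hi - lo;
}
#endif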
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
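
/* Illustrative sketch (not part of the original source): the dispatch
   structure the two functions above build, as a C analogue.  A switch
   whose cases span MINVAL..MINVAL+RANGE becomes one bounds check plus
   an indexed jump through a table of labels; the values are invented.  */
#if 0
static int
switch_via_table (int i)
{
  static const int table[4] = { 10, 11, 12, 13 };   /* stands in for the
						       CODE_LABEL vector */
  unsigned int idx = (unsigned int) (i - 2);	    /* minval == 2 */
  if (idx > 3)					    /* range == 3 */
    return -1;					    /* default_label */
  return table[idx];				    /* the tablejump */
}
#endif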
#include "gt-expr.h"