/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function,
   in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top level
   structure.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;

  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (!*slot)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We do this to share
   identical structures.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (!*slot)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this point.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
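
/* Illustrative sketch (not part of the original file; the function name is
   made up): because small CONST_INTs live in const_int_rtx and larger ones
   are hash-consed, pointer equality suffices to compare them.  */
#if 0
static void
example_const_int_sharing (void)
{
  rtx a = GEN_INT (7);
  rtx b = GEN_INT (7);
  gcc_assert (a == b);  /* Both are const_int_rtx[7 + MAX_SAVED_CONST_INT].  */
}
#endif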
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
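
/* Usage sketch (illustrative only; the function name is made up):
   gen_int_mode truncates C to MODE's width and sign-extends the result,
   so with an 8-bit QImode, gen_int_mode (0xff, QImode) yields
   (const_int -1), i.e. constm1_rtx.  */
#if 0
static void
example_gen_int_mode (void)
{
  rtx x = gen_int_mode (0xff, QImode);
  gcc_assert (x == constm1_rtx);
}
#endif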
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Construct a double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the
        value of the integer fits into HOST_WIDE_INT anyway (i.e., i1
        consists only of copies of the sign bit, and the signs of i0 and i1
        are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
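
/* Illustrative sketch of the cases above (not part of the original file;
   the function name is made up).  Assumes a 64-bit HOST_WIDE_INT.  */
#if 0
static void
example_immed_double_const (void)
{
  /* Case 2: I1 is just the sign extension of I0, so a CONST_INT results.  */
  rtx a = immed_double_const (-1, ~0, VOIDmode);
  gcc_assert (a == constm1_rtx);

  /* Case 3: the value needs both words (here 2^64: low word 0, high
     word 1), so a VOIDmode CONST_DOUBLE results.  */
  rtx b = immed_double_const (0, 1, VOIDmode);
  gcc_assert (GET_CODE (b) == CONST_DOUBLE && GET_MODE (b) == VOIDmode);
}
#endif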
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
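
/* Usage sketch (illustrative only; the function name is made up): requests
   for the well-known pointer registers in Pmode are folded to the single
   shared rtx rather than producing a fresh REG.  */
#if 0
static void
example_gen_rtx_REG_sharing (void)
{
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  gcc_assert (sp == stack_pointer_rtx);
}
#endif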
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
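
/* Illustrative sketch (not part of the original file; assumes a typical
   64-bit little-endian target): a lowpart integer subreg is valid, while a
   size-changing subreg of a floating-point mode is rejected, so
   gen_rtx_SUBREG would assert on it.  */
#if 0
static void
example_validate_subreg (void)
{
  gcc_assert (validate_subreg (SImode, DImode, NULL_RTX, 0));
  gcc_assert (!validate_subreg (HImode, DFmode, NULL_RTX, 0));
}
#endif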
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
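
/* Usage sketch (illustrative only; the function name is made up): building
   a two-element vector, e.g. for the body of a PARALLEL.  */
#if 0
static void
example_gen_rtvec (rtx x, rtx y)
{
  rtvec v = gen_rtvec (2, x, y);
  gcc_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == x);
}
#endif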
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
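
/* Worked example (illustrative only; assumes a little-endian target): the
   SImode lowpart of a DImode value starts at byte 0, and the paradoxical
   DImode "lowpart" of an SImode value also starts at offset 0.  On a
   typical big-endian 32-bit target the results would be 4 and -4.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  gcc_assert (byte_lowpart_offset (SImode, DImode) == 0);
  gcc_assert (byte_lowpart_offset (DImode, SImode) == 0);
}
#endif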
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
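
/* Usage sketch (illustrative only; the function name is made up): with
   generating_concat_p set, a complex-mode pseudo comes back as a CONCAT
   of two scalar pseudos.  */
#if 0
static void
example_gen_reg_rtx_complex (void)
{
  rtx c = gen_reg_rtx (DCmode);  /* (concat:DC (reg:DF) (reg:DF))  */
  gcc_assert (GET_CODE (c) == CONCAT
              && GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif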
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
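
/* Illustrative sketch of the sign/zero-extension case above (not part of
   the original file; the function name is made up): taking the SImode
   lowpart of (sign_extend:DI (reg:SI)) simply returns the inner SImode
   register.  */
#if 0
static void
example_gen_lowpart_common (void)
{
  rtx inner = gen_reg_rtx (SImode);
  rtx x = gen_rtx_SIGN_EXTEND (DImode, inner);
  gcc_assert (gen_lowpart_common (SImode, x) == inner);
}
#endif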
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
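
/* Usage sketch (illustrative only; the function name is made up): a
   CONST_INT has VOIDmode, so its high part cannot be taken with
   gen_highpart; gen_highpart_mode supplies the missing inner mode.  */
#if 0
static void
example_gen_highpart_mode (void)
{
  rtx x = GEN_INT (-1);
  /* The SImode high part of the DImode constant -1 is -1.  */
  rtx hi = gen_highpart_mode (SImode, DImode, x);
  gcc_assert (hi == constm1_rtx);
}
#endif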
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes of the OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
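
/* Worked example (illustrative only; assumes a 32-bit little-endian
   target): for SImode within DImode the lowpart is at byte 0 and the
   highpart at byte 4; a big-endian target swaps the two results.  */
#if 0
static void
example_subreg_offsets (void)
{
  gcc_assert (subreg_lowpart_offset (SImode, DImode) == 0);
  gcc_assert (subreg_highpart_offset (SImode, DImode) == 4);
}
#endif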
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
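
/* Usage sketch (illustrative only; assumes a 32-bit little-endian target,
   so word_mode is SImode): extracting the two words of a DImode pseudo
   yields SImode subregs at byte offsets 0 and 4.  */
#if 0
static void
example_operand_subword (void)
{
  rtx op = gen_reg_rtx (DImode);
  rtx w0 = operand_subword (op, 0, 1, DImode);
  rtx w1 = operand_subword (op, 1, 1, DImode);
  gcc_assert (GET_CODE (w0) == SUBREG && SUBREG_BYTE (w0) == 0);
  gcc_assert (GET_CODE (w1) == SUBREG && SUBREG_BYTE (w1) == 4);
}
#endif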
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPR expressions can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
         || (MAX (MEM_ALIGN (mem),
                  MAX (align, get_object_alignment (MEM_EXPR (mem))))
             < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.<field> only.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
          && (DECL_P (TREE_OPERAND (op0, 0))
              || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
        {
          if (DECL_P (TREE_OPERAND (op0, 0)))
            attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
          else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
            {
              attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
              attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
                                                attrs.align);
#endif
            }
          if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
            {
              unsigned HOST_WIDE_INT ioff
                = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
              unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
              attrs.align = MIN (aoff, attrs.align);
            }
        }
      else
        /* ??? This isn't fully correct, we can't set the alignment from the
           type in all cases.  */
        attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }

  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
    }

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base))
          && !TREE_THIS_VOLATILE (base))
        MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
            {
              attrs.size_known_p = true;
              attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
            }
          else
            attrs.size_known_p = false;
          attrs.align = DECL_ALIGN (t);
          align_computed = true;
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          attrs.align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
#endif
          align_computed = true;
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  attrs.align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
                    attrs.align = aoff;
                  align_computed = true;
                  attrs.offset_known_p = true;
                  attrs.offset = ioff;
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (host_integerp (off_tree, 1))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_low_cst (off_tree, 1);
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      if (!align_computed)
        {
          unsigned int obj_align = get_object_alignment (t);
          attrs.align = MAX (attrs.align, obj_align);
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = TYPE_ADDR_SPACE (type);
  set_mem_attrs (ref, &attrs);

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs, *defattrs;
  unsigned HOST_WIDE_INT max_align;

  attrs = *get_mem_attrs (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = targetm.addr_space.address_mode (attrs.addrspace);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    attrs.offset += offset;

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  /* We can compute the size in a number of ways.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  if (defattrs->size_known_p)
    {
      attrs.size_known_p = true;
      attrs.size = defattrs->size;
    }
  else if (attrs.size_known_p)
    attrs.size -= offset;

  set_mem_attrs (new_rtx, &attrs);

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}
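
/* Illustrative sketch, not part of the original file: a caller that
   wants the two word-sized halves of a DImode reference would
   typically go through the adjust_address wrapper macro (defined in
   expr.h to call adjust_address_1 with VALIDATE = ADJUST = 1).  The
   variable names below are hypothetical.

     rtx mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
     rtx lo  = adjust_address (mem, SImode, 0);
     rtx hi  = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   Both halves inherit MEM's attributes, with MEM_OFFSET and MEM_ALIGN
   updated as described above.  */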
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = targetm.addr_space.address_mode (attrs.addrspace);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
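
/* Illustrative sketch, not part of the original file: the common way to
   legitimize a MEM's address without losing its attributes; MEM is
   hypothetical.

     rtx addr = XEXP (mem, 0);
     if (! memory_address_p (GET_MODE (mem), addr))
       mem = replace_equiv_address (mem, force_reg (Pmode, addr));

   The _nv variant is for callers that can tolerate a (temporarily)
   invalid address.  */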
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && attrs.offset >= 0)
	    break;

	  if (! host_integerp (offset, 1))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += tree_low_cst (offset, 1);
	  attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
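
/* Illustrative sketch, not part of the original file: a target lacking
   QImode loads might widen a byte access and mask out the rest; the
   masking step is elided here and BYTE_MEM is hypothetical.

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The result is an SImode MEM whose size is that of SImode and whose
   MEM_EXPR, if kept, still covers all bytes actually accessed.  */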
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
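
/* Illustrative sketch, not part of the original file: the usual
   expansion-time pattern for a forward branch, assuming the optabs
   helper emit_cmp_and_jump_insns with its usual signature; OP0 and the
   label name are hypothetical.

     rtx label = gen_label_rtx ();
     emit_cmp_and_jump_insns (op0, const0_rtx, EQ, NULL_RTX,
			      GET_MODE (op0), 0, label);
     ... emit the code executed when OP0 != 0 ...
     emit_label (label);

   The label number comes from label_num, which is unique across the
   whole compilation (see above).  */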
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

static unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing               /* todo_flags_finish */
 }
};
2491 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2492 Recursively does the same for subexpressions. */
2495 verify_rtx_sharing (rtx orig, rtx insn)
2500 const char *format_ptr;
2505 code = GET_CODE (x);
2507 /* These types may be freely shared. */
2527 /* SCRATCH must be shared because they represent distinct values. */
2529 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2534 if (shared_const_p (orig))
2539 /* A MEM is allowed to be shared if its address is constant. */
2540 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2541 || reload_completed || reload_in_progress)
2550 /* This rtx may not be shared. If it has already been seen,
2551 replace it with a copy of itself. */
2552 #ifdef ENABLE_CHECKING
2553 if (RTX_FLAG (x, used))
2555 error ("invalid rtl sharing found in the insn");
2557 error ("shared rtx");
2559 internal_error ("internal consistency failure");
2562 gcc_assert (!RTX_FLAG (x, used));
2564 RTX_FLAG (x, used) = 1;
2566 /* Now scan the subexpressions recursively. */
2568 format_ptr = GET_RTX_FORMAT (code);
2570 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2572 switch (*format_ptr++)
2575 verify_rtx_sharing (XEXP (x, i), insn);
2579 if (XVEC (x, i) != NULL)
2582 int len = XVECLEN (x, i);
2584 for (j = 0; j < len; j++)
2586 /* We allow sharing of ASM_OPERANDS inside single
2588 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2589 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2591 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2593 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2602 /* Go through all the RTL insn bodies and check that there is no unexpected
2603 sharing in between the subexpressions. */
2606 verify_rtl_sharing (void)
2610 timevar_push (TV_VERIFY_RTL_SHARING);
2612 for (p = get_insns (); p; p = NEXT_INSN (p))
2615 reset_used_flags (PATTERN (p));
2616 reset_used_flags (REG_NOTES (p));
2618 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2619 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2622 rtx q, sequence = PATTERN (p);
2624 for (i = 0; i < XVECLEN (sequence, 0); i++)
2626 q = XVECEXP (sequence, 0, i);
2627 gcc_assert (INSN_P (q));
2628 reset_used_flags (PATTERN (q));
2629 reset_used_flags (REG_NOTES (q));
2631 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2636 for (p = get_insns (); p; p = NEXT_INSN (p))
2639 verify_rtx_sharing (PATTERN (p), p);
2640 verify_rtx_sharing (REG_NOTES (p), p);
2642 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2645 timevar_pop (TV_VERIFY_RTL_SHARING);
2648 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2649 Assumes the mark bits are cleared at entry. */
2652 unshare_all_rtl_in_chain (rtx insn)
2654 for (; insn; insn = NEXT_INSN (insn))
2657 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2658 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2660 CALL_INSN_FUNCTION_USAGE (insn)
2661 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2665 /* Go through all virtual stack slots of a function and mark them as
2666 shared. We never replace the DECL_RTLs themselves with a copy,
2667 but expressions mentioned into a DECL_RTL cannot be shared with
2668 expressions in the instruction stream.
2670 Note that reload may convert pseudo registers into memories in-place.
2671 Pseudo registers are always shared, but MEMs never are. Thus if we
2672 reset the used flags on MEMs in the instruction stream, we must set
2673 them again on MEMs that appear in DECL_RTLs. */
2676 set_used_decls (tree blk)
2681 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2682 if (DECL_RTL_SET_P (t))
2683 set_used_flags (DECL_RTL (t));
2685 /* Now process sub-blocks. */
2686 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
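
/* Illustrative sketch, not part of the original file: the usual
   unsharing protocol clears the used bits over a region first and then
   copies anything encountered twice, e.g. for a single insn:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_in_chain above applies exactly this pattern to every
   insn in a chain.  */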
2701 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2702 use. Recursively does the same for subexpressions. */
2705 copy_rtx_if_shared_1 (rtx *orig1)
2711 const char *format_ptr;
2715 /* Repeat is used to turn tail-recursion into iteration. */
2722 code = GET_CODE (x);
2724 /* These types may be freely shared. */
2743 /* SCRATCH must be shared because they represent distinct values. */
2746 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2751 if (shared_const_p (x))
2761 /* The chain of insns is not being copied. */
2768 /* This rtx may not be shared. If it has already been seen,
2769 replace it with a copy of itself. */
2771 if (RTX_FLAG (x, used))
2773 x = shallow_copy_rtx (x);
2776 RTX_FLAG (x, used) = 1;
2778 /* Now scan the subexpressions recursively.
2779 We can store any replaced subexpressions directly into X
2780 since we know X is not shared! Any vectors in X
2781 must be copied if X was copied. */
2783 format_ptr = GET_RTX_FORMAT (code);
2784 length = GET_RTX_LENGTH (code);
2787 for (i = 0; i < length; i++)
2789 switch (*format_ptr++)
2793 copy_rtx_if_shared_1 (last_ptr);
2794 last_ptr = &XEXP (x, i);
2798 if (XVEC (x, i) != NULL)
2801 int len = XVECLEN (x, i);
2803 /* Copy the vector iff I copied the rtx and the length
2805 if (copied && len > 0)
2806 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2808 /* Call recursively on all inside the vector. */
2809 for (j = 0; j < len; j++)
2812 copy_rtx_if_shared_1 (last_ptr);
2813 last_ptr = &XVECEXP (x, i, j);
2828 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2831 mark_used_flags (rtx x, int flag)
2835 const char *format_ptr;
2838 /* Repeat is used to turn tail-recursion into iteration. */
2843 code = GET_CODE (x);
2845 /* These types may be freely shared so we needn't do any resetting
2872 /* The chain of insns is not being copied. */
2879 RTX_FLAG (x, used) = flag;
2881 format_ptr = GET_RTX_FORMAT (code);
2882 length = GET_RTX_LENGTH (code);
2884 for (i = 0; i < length; i++)
2886 switch (*format_ptr++)
2894 mark_used_flags (XEXP (x, i), flag);
2898 for (j = 0; j < XVECLEN (x, i); j++)
2899 mark_used_flags (XVECEXP (x, i, j), flag);
2905 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2906 to look for shared sub-parts. */
2909 reset_used_flags (rtx x)
2911 mark_used_flags (x, 0);
2914 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2915 to look for shared sub-parts. */
2918 set_used_flags (rtx x)
2920 mark_used_flags (x, 1);
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
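
/* Illustrative sketch, not part of the original file: protecting a
   source operand before emitting code that clobbers a destination it
   might share; the names are hypothetical.

     x = make_safe_from (x, target);
     emit_move_insn (target, const0_rtx);
     ... X can still be used here, possibly from a fresh pseudo ...
*/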
2960 /* Emission of insns (adding them to the doubly-linked list). */
2962 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2965 get_last_insn_anywhere (void)
2967 struct sequence_stack *stack;
2968 if (get_last_insn ())
2969 return get_last_insn ();
2970 for (stack = seq_stack; stack; stack = stack->next)
2971 if (stack->last != 0)
2976 /* Return the first nonnote insn emitted in current sequence or current
2977 function. This routine looks inside SEQUENCEs. */
2980 get_first_nonnote_insn (void)
2982 rtx insn = get_insns ();
2987 for (insn = next_insn (insn);
2988 insn && NOTE_P (insn);
2989 insn = next_insn (insn))
2993 if (NONJUMP_INSN_P (insn)
2994 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2995 insn = XVECEXP (PATTERN (insn), 0, 0);
3002 /* Return the last nonnote insn emitted in current sequence or current
3003 function. This routine looks inside SEQUENCEs. */
3006 get_last_nonnote_insn (void)
3008 rtx insn = get_last_insn ();
3013 for (insn = previous_insn (insn);
3014 insn && NOTE_P (insn);
3015 insn = previous_insn (insn))
3019 if (NONJUMP_INSN_P (insn)
3020 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3021 insn = XVECEXP (PATTERN (insn), 0,
3022 XVECLEN (PATTERN (insn), 0) - 1);
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }
  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }
  return insn;
}
3084 /* Return the next insn after INSN that is not a NOTE. This routine does not
3085 look inside SEQUENCEs. */
3088 next_nonnote_insn (rtx insn)
3092 insn = NEXT_INSN (insn);
3093 if (insn == 0 || !NOTE_P (insn))
3100 /* Return the next insn after INSN that is not a NOTE, but stop the
3101 search before we enter another basic block. This routine does not
3102 look inside SEQUENCEs. */
3105 next_nonnote_insn_bb (rtx insn)
3109 insn = NEXT_INSN (insn);
3110 if (insn == 0 || !NOTE_P (insn))
3112 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3119 /* Return the previous insn before INSN that is not a NOTE. This routine does
3120 not look inside SEQUENCEs. */
3123 prev_nonnote_insn (rtx insn)
3127 insn = PREV_INSN (insn);
3128 if (insn == 0 || !NOTE_P (insn))
3135 /* Return the previous insn before INSN that is not a NOTE, but stop
3136 the search before we enter another basic block. This routine does
3137 not look inside SEQUENCEs. */
3140 prev_nonnote_insn_bb (rtx insn)
3144 insn = PREV_INSN (insn);
3145 if (insn == 0 || !NOTE_P (insn))
3147 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3154 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3155 routine does not look inside SEQUENCEs. */
3158 next_nondebug_insn (rtx insn)
3162 insn = NEXT_INSN (insn);
3163 if (insn == 0 || !DEBUG_INSN_P (insn))
3170 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3171 This routine does not look inside SEQUENCEs. */
3174 prev_nondebug_insn (rtx insn)
3178 insn = PREV_INSN (insn);
3179 if (insn == 0 || !DEBUG_INSN_P (insn))
3186 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3187 This routine does not look inside SEQUENCEs. */
3190 next_nonnote_nondebug_insn (rtx insn)
3194 insn = NEXT_INSN (insn);
3195 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }
  return insn;
}
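
/* Illustrative sketch, not part of the original file: a typical scan
   over the "real" instruction stream using these walkers; HEAD is
   hypothetical.

     rtx insn;
     for (insn = next_nonnote_nondebug_insn (head);
	  insn;
	  insn = next_nonnote_nondebug_insn (insn))
       if (CALL_P (insn))
	 break;
*/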
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
3227 insn = NEXT_INSN (insn);
3228 if (insn == 0 || INSN_P (insn))
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
3244 insn = PREV_INSN (insn);
3245 if (insn == 0 || INSN_P (insn))
3252 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3253 This routine does not look inside SEQUENCEs. */
3256 last_call_insn (void)
3260 for (insn = get_last_insn ();
3261 insn && !CALL_P (insn);
3262 insn = PREV_INSN (insn))
3268 /* Find the next insn after INSN that really does something. This routine
3269 does not look inside SEQUENCEs. After reload this also skips over
3270 standalone USE and CLOBBER insn. */
3273 active_insn_p (const_rtx insn)
3275 return (CALL_P (insn) || JUMP_P (insn)
3276 || (NONJUMP_INSN_P (insn)
3277 && (! reload_completed
3278 || (GET_CODE (PATTERN (insn)) != USE
3279 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3283 next_active_insn (rtx insn)
3287 insn = NEXT_INSN (insn);
3288 if (insn == 0 || active_insn_p (insn))
3295 /* Find the last insn before INSN that really does something. This routine
3296 does not look inside SEQUENCEs. After reload this also skips over
3297 standalone USE and CLOBBER insn. */
3300 prev_active_insn (rtx insn)
3304 insn = PREV_INSN (insn);
3305 if (insn == 0 || active_insn_p (insn))
3312 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3315 next_label (rtx insn)
3319 insn = NEXT_INSN (insn);
3320 if (insn == 0 || LABEL_P (insn))
3327 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3330 prev_label (rtx insn)
3334 insn = PREV_INSN (insn);
3335 if (insn == 0 || LABEL_P (insn))
3342 /* Return the last label to mark the same position as LABEL. Return LABEL
3343 itself if it is null or any return rtx. */
3346 skip_consecutive_labels (rtx label)
3350 if (label && ANY_RETURN_P (label))
3353 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3361 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3362 and REG_CC_USER notes so we can find it. */
3365 link_cc0_insns (rtx insn)
3367 rtx user = next_nonnote_insn (insn);
3369 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3370 user = XVECEXP (PATTERN (user), 0, 0);
3372 add_reg_note (user, REG_CC_SETTER, insn);
3373 add_reg_note (insn, REG_CC_USER, user);
3376 /* Return the next insn that uses CC0 after INSN, which is assumed to
3377 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3378 applied to the result of this function should yield INSN).
3380 Normally, this is simply the next insn. However, if a REG_CC_USER note
3381 is present, it contains the insn that uses CC0.
3383 Return 0 if we can't find the insn. */
3386 next_cc0_user (rtx insn)
3388 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3391 return XEXP (note, 0);
3393 insn = next_nonnote_insn (insn);
3394 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3395 insn = XVECEXP (PATTERN (insn), 0, 0);
3397 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3403 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3404 note, it is the previous insn. */
3407 prev_cc0_setter (rtx insn)
3409 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3412 return XEXP (note, 0);
3414 insn = prev_nonnote_insn (insn);
3415 gcc_assert (sets_cc0_p (PATTERN (insn)));
3422 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3425 find_auto_inc (rtx *xp, void *data)
3428 rtx reg = (rtx) data;
3430 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3433 switch (GET_CODE (x))
3441 if (rtx_equal_p (reg, XEXP (x, 0)))
3452 /* Increment the label uses for all labels present in rtx. */
3455 mark_label_nuses (rtx x)
3461 code = GET_CODE (x);
3462 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3463 LABEL_NUSES (XEXP (x, 0))++;
3465 fmt = GET_RTX_FORMAT (code);
3466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3469 mark_label_nuses (XEXP (x, i));
3470 else if (fmt[i] == 'E')
3471 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3472 mark_label_nuses (XVECEXP (x, i, j));
3477 /* Try splitting insns that can be split for better scheduling.
3478 PAT is the pattern which might split.
3479 TRIAL is the insn providing PAT.
3480 LAST is nonzero if we should return the last insn of the sequence produced.
3482 If this routine succeeds in splitting, it returns the first or last
3483 replacement insn depending on the value of LAST. Otherwise, it
3484 returns TRIAL. If the insn to be returned can be split, it will be. */
3487 try_split (rtx pat, rtx trial, int last)
3489 rtx before = PREV_INSN (trial);
3490 rtx after = NEXT_INSN (trial);
3491 int has_barrier = 0;
3494 rtx insn_last, insn;
3497 /* We're not good at redistributing frame information. */
3498 if (RTX_FRAME_RELATED_P (trial))
3501 if (any_condjump_p (trial)
3502 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3503 split_branch_probability = INTVAL (XEXP (note, 0));
3504 probability = split_branch_probability;
3506 seq = split_insns (pat, trial);
3508 split_branch_probability = -1;
3510 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3511 We may need to handle this specially. */
3512 if (after && BARRIER_P (after))
3515 after = NEXT_INSN (after);
3521 /* Avoid infinite loop if any insn of the result matches
3522 the original pattern. */
3526 if (INSN_P (insn_last)
3527 && rtx_equal_p (PATTERN (insn_last), pat))
3529 if (!NEXT_INSN (insn_last))
3531 insn_last = NEXT_INSN (insn_last);
3534 /* We will be adding the new sequence to the function. The splitters
3535 may have introduced invalid RTL sharing, so unshare the sequence now. */
3536 unshare_all_rtl_in_chain (seq);
3539 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3543 mark_jump_label (PATTERN (insn), insn, 0);
3545 if (probability != -1
3546 && any_condjump_p (insn)
3547 && !find_reg_note (insn, REG_BR_PROB, 0))
3549 /* We can preserve the REG_BR_PROB notes only if exactly
3550 one jump is created, otherwise the machine description
3551 is responsible for this step using
3552 split_branch_probability variable. */
3553 gcc_assert (njumps == 1);
3554 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3559 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3560 in SEQ and copy any additional information across. */
3563 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3568 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3569 target may have explicitly specified. */
3570 p = &CALL_INSN_FUNCTION_USAGE (insn);
3573 *p = CALL_INSN_FUNCTION_USAGE (trial);
3575 /* If the old call was a sibling call, the new one must
3577 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3579 /* If the new call is the last instruction in the sequence,
3580 it will effectively replace the old call in-situ. Otherwise
3581 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3582 so that it comes immediately after the new call. */
3583 if (NEXT_INSN (insn))
3584 for (next = NEXT_INSN (trial);
3585 next && NOTE_P (next);
3586 next = NEXT_INSN (next))
3587 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3590 add_insn_after (next, insn, NULL);
3596 /* Copy notes, particularly those related to the CFG. */
3597 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3599 switch (REG_NOTE_KIND (note))
3602 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3607 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3610 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3614 case REG_NON_LOCAL_GOTO:
3615 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3618 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3624 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3626 rtx reg = XEXP (note, 0);
3627 if (!FIND_REG_INC_NOTE (insn, reg)
3628 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3629 add_reg_note (insn, REG_INC, reg);
3635 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3643 /* If there are LABELS inside the split insns increment the
3644 usage count so we don't delete the label. */
3648 while (insn != NULL_RTX)
3650 /* JUMP_P insns have already been "marked" above. */
3651 if (NONJUMP_INSN_P (insn))
3652 mark_label_nuses (PATTERN (insn));
3654 insn = PREV_INSN (insn);
3658 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3660 delete_insn (trial);
3662 emit_barrier_after (tem);
  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
3669 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3670 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3671 tem = try_split (PATTERN (tem), tem, 1);
  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
	 ? (after ? PREV_INSN (after) : get_last_insn ())
	 : NEXT_INSN (before);
}
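
/* Illustrative sketch, not part of the original file: a pass that wants
   an insn split for scheduling purposes calls try_split and continues
   with whatever comes back, since on failure TRIAL itself is returned;
   the names are hypothetical.

     rtx last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       ... INSN was replaced by the split sequence ending in LAST ...
*/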
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
3712 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3715 make_debug_insn_raw (rtx pattern)
3719 insn = rtx_alloc (DEBUG_INSN);
3720 INSN_UID (insn) = cur_debug_insn_uid++;
3721 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3722 INSN_UID (insn) = cur_insn_uid++;
3724 PATTERN (insn) = pattern;
3725 INSN_CODE (insn) = -1;
3726 REG_NOTES (insn) = NULL;
3727 INSN_LOCATOR (insn) = curr_insn_locator ();
3728 BLOCK_FOR_INSN (insn) = NULL;
3733 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3736 make_jump_insn_raw (rtx pattern)
3740 insn = rtx_alloc (JUMP_INSN);
3741 INSN_UID (insn) = cur_insn_uid++;
3743 PATTERN (insn) = pattern;
3744 INSN_CODE (insn) = -1;
3745 REG_NOTES (insn) = NULL;
3746 JUMP_LABEL (insn) = NULL;
3747 INSN_LOCATOR (insn) = curr_insn_locator ();
3748 BLOCK_FOR_INSN (insn) = NULL;
3753 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3756 make_call_insn_raw (rtx pattern)
3760 insn = rtx_alloc (CALL_INSN);
3761 INSN_UID (insn) = cur_insn_uid++;
3763 PATTERN (insn) = pattern;
3764 INSN_CODE (insn) = -1;
3765 REG_NOTES (insn) = NULL;
3766 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3767 INSN_LOCATOR (insn) = curr_insn_locator ();
3768 BLOCK_FOR_INSN (insn) = NULL;
3773 /* Add INSN to the end of the doubly-linked list.
3774 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3779 PREV_INSN (insn) = get_last_insn();
3780 NEXT_INSN (insn) = 0;
3782 if (NULL != get_last_insn())
3783 NEXT_INSN (get_last_insn ()) = insn;
3785 if (NULL == get_insns ())
3786 set_first_insn (insn);
3788 set_last_insn (insn);
3791 /* Add INSN into the doubly-linked list after insn AFTER. This and
3792 the next should be the only functions called to insert an insn once
3793 delay slots have been filled since only they know how to update a
3797 add_insn_after (rtx insn, rtx after, basic_block bb)
3799 rtx next = NEXT_INSN (after);
3801 gcc_assert (!optimize || !INSN_DELETED_P (after));
3803 NEXT_INSN (insn) = next;
3804 PREV_INSN (insn) = after;
3808 PREV_INSN (next) = insn;
3809 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3810 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3812 else if (get_last_insn () == after)
3813 set_last_insn (insn);
3816 struct sequence_stack *stack = seq_stack;
3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (after == stack->last)
3828 if (!BARRIER_P (after)
3829 && !BARRIER_P (insn)
3830 && (bb = BLOCK_FOR_INSN (after)))
3832 set_block_for_insn (insn, bb);
3834 df_insn_rescan (insn);
3835 /* Should not happen as first in the BB is always
3836 either NOTE or LABEL. */
3837 if (BB_END (bb) == after
3838 /* Avoid clobbering of structure when creating new BB. */
3839 && !BARRIER_P (insn)
3840 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3844 NEXT_INSN (after) = insn;
3845 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3847 rtx sequence = PATTERN (after);
3848 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3852 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3853 the previous should be the only functions called to insert an insn
3854 once delay slots have been filled since only they know how to
3855 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3859 add_insn_before (rtx insn, rtx before, basic_block bb)
3861 rtx prev = PREV_INSN (before);
3863 gcc_assert (!optimize || !INSN_DELETED_P (before));
3865 PREV_INSN (insn) = prev;
3866 NEXT_INSN (insn) = before;
3870 NEXT_INSN (prev) = insn;
3871 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3873 rtx sequence = PATTERN (prev);
3874 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3877 else if (get_insns () == before)
3878 set_first_insn (insn);
3881 struct sequence_stack *stack = seq_stack;
3882 /* Scan all pending sequences too. */
3883 for (; stack; stack = stack->next)
3884 if (before == stack->first)
3886 stack->first = insn;
3894 && !BARRIER_P (before)
3895 && !BARRIER_P (insn))
3896 bb = BLOCK_FOR_INSN (before);
3900 set_block_for_insn (insn, bb);
3902 df_insn_rescan (insn);
3903 /* Should not happen as first in the BB is always either NOTE or
3905 gcc_assert (BB_HEAD (bb) != insn
3906 /* Avoid clobbering of structure when creating new BB. */
3908 || NOTE_INSN_BASIC_BLOCK_P (insn));
3911 PREV_INSN (before) = insn;
3912 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3913 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
3928 /* Remove an insn from its doubly-linked list. This function knows how
3929 to handle sequences. */
3931 remove_insn (rtx insn)
3933 rtx next = NEXT_INSN (insn);
3934 rtx prev = PREV_INSN (insn);
3937 /* Later in the code, the block will be marked dirty. */
3938 df_insn_delete (NULL, INSN_UID (insn));
3942 NEXT_INSN (prev) = next;
3943 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3945 rtx sequence = PATTERN (prev);
3946 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3949 else if (get_insns () == insn)
3952 PREV_INSN (next) = NULL;
3953 set_first_insn (next);
3957 struct sequence_stack *stack = seq_stack;
3958 /* Scan all pending sequences too. */
3959 for (; stack; stack = stack->next)
3960 if (insn == stack->first)
3962 stack->first = next;
3971 PREV_INSN (next) = prev;
3972 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3973 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3975 else if (get_last_insn () == insn)
3976 set_last_insn (prev);
3979 struct sequence_stack *stack = seq_stack;
3980 /* Scan all pending sequences too. */
3981 for (; stack; stack = stack->next)
3982 if (insn == stack->last)
3990 if (!BARRIER_P (insn)
3991 && (bb = BLOCK_FOR_INSN (insn)))
3993 if (NONDEBUG_INSN_P (insn))
3994 df_set_bb_dirty (bb);
3995 if (BB_HEAD (bb) == insn)
3997 /* Never ever delete the basic block note without deleting whole
3999 gcc_assert (!NOTE_P (insn));
4000 BB_HEAD (bb) = next;
4002 if (BB_END (bb) == insn)
4007 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4010 add_function_usage_to (rtx call_insn, rtx call_fusage)
4012 gcc_assert (call_insn && CALL_P (call_insn));
4014 /* Put the register usage information on the CALL. If there is already
4015 some usage information, put ours at the end. */
4016 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4020 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4021 link = XEXP (link, 1))
4024 XEXP (link, 1) = call_fusage;
4027 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}
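
/* Illustrative sketch, not part of the original file: the usual
   checkpoint/rollback idiom around speculative expansion; the names
   are hypothetical.

     rtx last = get_last_insn ();
     ... try to expand something that may fail ...
     if (failed)
       delete_insns_since (last);
*/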
4043 /* This function is deprecated, please use sequences instead.
4045 Move a consecutive bunch of insns to a different place in the chain.
4046 The insns to be moved are those between FROM and TO.
4047 They are moved to a new position after the insn AFTER.
4048 AFTER must not be FROM or TO or any insn in between.
4050 This function does not know about SEQUENCEs and hence should not be
4051 called after delay-slot filling has been done. */
4054 reorder_insns_nobb (rtx from, rtx to, rtx after)
4056 #ifdef ENABLE_CHECKING
4058 for (x = from; x != to; x = NEXT_INSN (x))
4059 gcc_assert (after != x);
4060 gcc_assert (after != to);
4063 /* Splice this bunch out of where it is now. */
4064 if (PREV_INSN (from))
4065 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4067 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4068 if (get_last_insn () == to)
4069 set_last_insn (PREV_INSN (from));
4070 if (get_insns () == from)
4071 set_first_insn (NEXT_INSN (to));
4073 /* Make the new neighbors point to it and it to them. */
4074 if (NEXT_INSN (after))
4075 PREV_INSN (NEXT_INSN (after)) = to;
4077 NEXT_INSN (to) = NEXT_INSN (after);
4078 PREV_INSN (from) = after;
4079 NEXT_INSN (after) = from;
4080 if (after == get_last_insn())
4084 /* Same as function above, but take care to update BB boundaries. */
4086 reorder_insns (rtx from, rtx to, rtx after)
4088 rtx prev = PREV_INSN (from);
4089 basic_block bb, bb2;
4091 reorder_insns_nobb (from, to, after);
4093 if (!BARRIER_P (after)
4094 && (bb = BLOCK_FOR_INSN (after)))
4097 df_set_bb_dirty (bb);
4099 if (!BARRIER_P (from)
4100 && (bb2 = BLOCK_FOR_INSN (from)))
4102 if (BB_END (bb2) == to)
4103 BB_END (bb2) = prev;
4104 df_set_bb_dirty (bb2);
4107 if (BB_END (bb) == after)
4110 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4112 df_insn_change_bb (x, bb);
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */
4143 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4144 rtx (*make_raw) (rtx))
4148 gcc_assert (before);
4153 switch (GET_CODE (x))
4165 rtx next = NEXT_INSN (insn);
4166 add_insn_before (insn, before, bb);
4172 #ifdef ENABLE_RTL_CHECKING
4179 last = (*make_raw) (x);
4180 add_insn_before (last, before, bb);
4187 /* Make X be output before the instruction BEFORE. */
4190 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4192 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4195 /* Make an instruction with body X and code JUMP_INSN
4196 and output it before the instruction BEFORE. */
4199 emit_jump_insn_before_noloc (rtx x, rtx before)
4201 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4202 make_jump_insn_raw);
4205 /* Make an instruction with body X and code CALL_INSN
4206 and output it before the instruction BEFORE. */
4209 emit_call_insn_before_noloc (rtx x, rtx before)
4211 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4212 make_call_insn_raw);
4215 /* Make an instruction with body X and code DEBUG_INSN
4216 and output it before the instruction BEFORE. */
4219 emit_debug_insn_before_noloc (rtx x, rtx before)
4221 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4222 make_debug_insn_raw);
4225 /* Make an insn of code BARRIER
4226 and output it before the insn BEFORE. */
4229 emit_barrier_before (rtx before)
4231 rtx insn = rtx_alloc (BARRIER);
4233 INSN_UID (insn) = cur_insn_uid++;
4235 add_insn_before (insn, before, NULL);
4239 /* Emit the label LABEL before the insn BEFORE. */
4242 emit_label_before (rtx label, rtx before)
4244 /* This can be called twice for the same label as a result of the
4245 confusion that follows a syntax error! So make it harmless. */
4246 if (INSN_UID (label) == 0)
4248 INSN_UID (label) = cur_insn_uid++;
4249 add_insn_before (label, before, NULL);
4255 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4258 emit_note_before (enum insn_note subtype, rtx before)
4260 rtx note = rtx_alloc (NOTE);
4261 INSN_UID (note) = cur_insn_uid++;
4262 NOTE_KIND (note) = subtype;
4263 BLOCK_FOR_INSN (note) = NULL;
4264 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4266 add_insn_before (note, before, NULL);
4270 /* Helper for emit_insn_after, handles lists of instructions
4274 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4278 if (!bb && !BARRIER_P (after))
4279 bb = BLOCK_FOR_INSN (after);
4283 df_set_bb_dirty (bb);
4284 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4285 if (!BARRIER_P (last))
4287 set_block_for_insn (last, bb);
4288 df_insn_rescan (last);
4290 if (!BARRIER_P (last))
4292 set_block_for_insn (last, bb);
4293 df_insn_rescan (last);
4295 if (BB_END (bb) == after)
4299 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4302 after_after = NEXT_INSN (after);
4304 NEXT_INSN (after) = first;
4305 PREV_INSN (first) = after;
4306 NEXT_INSN (last) = after_after;
4308 PREV_INSN (after_after) = last;
4310 if (after == get_last_insn())
4311 set_last_insn (last);
4317 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4318 rtx (*make_raw)(rtx))
4327 switch (GET_CODE (x))
4336 last = emit_insn_after_1 (x, after, bb);
4339 #ifdef ENABLE_RTL_CHECKING
4346 last = (*make_raw) (x);
4347 add_insn_after (last, after, bb);
4354 /* Make X be output after the insn AFTER and set the BB of insn. If
4355 BB is NULL, an attempt is made to infer the BB from AFTER. */
4358 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4360 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4364 /* Make an insn of code JUMP_INSN with body X
4365 and output it after the insn AFTER. */
4368 emit_jump_insn_after_noloc (rtx x, rtx after)
4370 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4373 /* Make an instruction with body X and code CALL_INSN
4374 and output it after the instruction AFTER. */
4377 emit_call_insn_after_noloc (rtx x, rtx after)
4379 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4382 /* Make an instruction with body X and code CALL_INSN
4383 and output it after the instruction AFTER. */
4386 emit_debug_insn_after_noloc (rtx x, rtx after)
4388 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4391 /* Make an insn of code BARRIER
4392 and output it after the insn AFTER. */
4395 emit_barrier_after (rtx after)
4397 rtx insn = rtx_alloc (BARRIER);
4399 INSN_UID (insn) = cur_insn_uid++;
4401 add_insn_after (insn, after, NULL);
4405 /* Emit the label LABEL after the insn AFTER. */
4408 emit_label_after (rtx label, rtx after)
4410 /* This can be called twice for the same label
4411 as a result of the confusion that follows a syntax error!
4412 So make it harmless. */
4413 if (INSN_UID (label) == 0)
4415 INSN_UID (label) = cur_insn_uid++;
4416 add_insn_after (label, after, NULL);
4422 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4425 emit_note_after (enum insn_note subtype, rtx after)
4427 rtx note = rtx_alloc (NOTE);
4428 INSN_UID (note) = cur_insn_uid++;
4429 NOTE_KIND (note) = subtype;
4430 BLOCK_FOR_INSN (note) = NULL;
4431 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4432 add_insn_after (note, after, NULL);
4436 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4437 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4440 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4441 rtx (*make_raw) (rtx))
4443 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4445 if (pattern == NULL_RTX || !loc)
4448 after = NEXT_INSN (after);
4451 if (active_insn_p (after) && !INSN_LOCATOR (after))
4452 INSN_LOCATOR (after) = loc;
4455 after = NEXT_INSN (after);
4460 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4461 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4465 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4466 rtx (*make_raw) (rtx))
4470 if (skip_debug_insns)
4471 while (DEBUG_INSN_P (prev))
4472 prev = PREV_INSN (prev);
4475 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4478 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4481 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4483 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4485 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4488 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4490 emit_insn_after (rtx pattern, rtx after)
4492 return emit_pattern_after (pattern, after, true, make_insn_raw);
4495 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4497 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4499 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4502 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4504 emit_jump_insn_after (rtx pattern, rtx after)
4506 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4509 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4511 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4513 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4516 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4518 emit_call_insn_after (rtx pattern, rtx after)
4520 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4523 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4525 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4527 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4530 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4532 emit_debug_insn_after (rtx pattern, rtx after)
4534 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4537 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4538 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4539 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4543 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4544 rtx (*make_raw) (rtx))
4546 rtx first = PREV_INSN (before);
4547 rtx last = emit_pattern_before_noloc (pattern, before,
4548 insnp ? before : NULL_RTX,
4551 if (pattern == NULL_RTX || !loc)
4555 first = get_insns ();
4557 first = NEXT_INSN (first);
4560 if (active_insn_p (first) && !INSN_LOCATOR (first))
4561 INSN_LOCATOR (first) = loc;
4564 first = NEXT_INSN (first);
4569 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4570 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4571 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4572 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4575 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4576 bool insnp, rtx (*make_raw) (rtx))
4580 if (skip_debug_insns)
4581 while (DEBUG_INSN_P (next))
4582 next = PREV_INSN (next);
4585 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4588 return emit_pattern_before_noloc (pattern, before,
4589 insnp ? before : NULL_RTX,
4593 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4595 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4597 return emit_pattern_before_setloc (pattern, before, loc, true,
4601 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4603 emit_insn_before (rtx pattern, rtx before)
4605 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4608 /* like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4610 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4612 return emit_pattern_before_setloc (pattern, before, loc, false,
4613 make_jump_insn_raw);
4616 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4618 emit_jump_insn_before (rtx pattern, rtx before)
4620 return emit_pattern_before (pattern, before, true, false,
4621 make_jump_insn_raw);
4624 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4626 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4628 return emit_pattern_before_setloc (pattern, before, loc, false,
4629 make_call_insn_raw);
4632 /* Like emit_call_insn_before_noloc,
4633 but set insn_locator according to BEFORE. */
4635 emit_call_insn_before (rtx pattern, rtx before)
4637 return emit_pattern_before (pattern, before, true, false,
4638 make_call_insn_raw);
4641 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4643 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4645 return emit_pattern_before_setloc (pattern, before, loc, false,
4646 make_debug_insn_raw);
4649 /* Like emit_debug_insn_before_noloc,
4650 but set insn_locator according to BEFORE. */
4652 emit_debug_insn_before (rtx pattern, rtx before)
4654 return emit_pattern_before (pattern, before, false, false,
4655 make_debug_insn_raw);
4658 /* Take X and emit it at the end of the doubly-linked
4661 Returns the last insn emitted. */
4666 rtx last = get_last_insn();
4672 switch (GET_CODE (x))
4684 rtx next = NEXT_INSN (insn);
4691 #ifdef ENABLE_RTL_CHECKING
4698 last = make_insn_raw (x);
4706 /* Make an insn of code DEBUG_INSN with pattern X
4707 and add it to the end of the doubly-linked list. */
4710 emit_debug_insn (rtx x)
4712 rtx last = get_last_insn();
4718 switch (GET_CODE (x))
4730 rtx next = NEXT_INSN (insn);
4737 #ifdef ENABLE_RTL_CHECKING
4744 last = make_debug_insn_raw (x);
4752 /* Make an insn of code JUMP_INSN with pattern X
4753 and add it to the end of the doubly-linked list. */
4756 emit_jump_insn (rtx x)
4758 rtx last = NULL_RTX, insn;
4760 switch (GET_CODE (x))
4772 rtx next = NEXT_INSN (insn);
4779 #ifdef ENABLE_RTL_CHECKING
4786 last = make_jump_insn_raw (x);
4794 /* Make an insn of code CALL_INSN with pattern X
4795 and add it to the end of the doubly-linked list. */
4798 emit_call_insn (rtx x)
4802 switch (GET_CODE (x))
4811 insn = emit_insn (x);
4814 #ifdef ENABLE_RTL_CHECKING
4821 insn = make_call_insn_raw (x);
4829 /* Add the label LABEL to the end of the doubly-linked list. */
4832 emit_label (rtx label)
4834 /* This can be called twice for the same label
4835 as a result of the confusion that follows a syntax error!
4836 So make it harmless. */
4837 if (INSN_UID (label) == 0)
4839 INSN_UID (label) = cur_insn_uid++;
4845 /* Make an insn of code BARRIER
4846 and add it to the end of the doubly-linked list. */
4851 rtx barrier = rtx_alloc (BARRIER);
4852 INSN_UID (barrier) = cur_insn_uid++;
/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}
4875 /* Make an insn of code NOTE or type NOTE_NO
4876 and add it to the end of the doubly-linked list. */
4879 emit_note (enum insn_note kind)
4883 note = rtx_alloc (NOTE);
4884 INSN_UID (note) = cur_insn_uid++;
4885 NOTE_KIND (note) = kind;
4886 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4887 BLOCK_FOR_INSN (note) = NULL;
4892 /* Emit a clobber of lvalue X. */
4895 emit_clobber (rtx x)
4897 /* CONCATs should not appear in the insn stream. */
4898 if (GET_CODE (x) == CONCAT)
4900 emit_clobber (XEXP (x, 0));
4901 return emit_clobber (XEXP (x, 1));
4903 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4906 /* Return a sequence of insns to clobber lvalue X. */
4920 /* Emit a use of rvalue X. */
4925 /* CONCATs should not appear in the insn stream. */
4926 if (GET_CODE (x) == CONCAT)
4928 emit_use (XEXP (x, 0));
4929 return emit_use (XEXP (x, 1));
4931 return emit_insn (gen_rtx_USE (VOIDmode, x));
4934 /* Return a sequence of insns to use rvalue X. */
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}
4957 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4958 note of this type already exists, remove it first. */
4961 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4963 rtx note = find_reg_note (insn, kind, NULL_RTX);
4969 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4970 has multiple sets (some callers assume single_set
4971 means the insn only has one set, when in fact it
4972 means the insn only has one * useful * set). */
4973 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4979 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4980 It serves no useful purpose and breaks eliminate_regs. */
4981 if (GET_CODE (datum) == ASM_OPERANDS)
4986 XEXP (note, 0) = datum;
4987 df_notes_rescan (insn);
4995 XEXP (note, 0) = datum;
5001 add_reg_note (insn, kind, datum);
5007 df_notes_rescan (insn);
  return REG_NOTES (insn);
}
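
/* Illustrative sketch, not part of the original file: an expander that
   computes A + B by some indirect means can record the simple form for
   later passes; the names are hypothetical.

     rtx insn = emit_move_insn (target, result);
     set_unique_reg_note (insn, REG_EQUAL, gen_rtx_PLUS (mode, a, b));

   Emitting the note through this function rather than add_reg_note
   guarantees at most one REG_EQUAL note per insn.  */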
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
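
/* Classification examples (illustrative only):

     (set (pc) (label_ref 23))                          -> JUMP_INSN
     (set (reg:SI 60) (call (mem:QI (symbol_ref "f"))
			    (const_int 0)))             -> CALL_INSN
     (set (reg:SI 60) (reg:SI 61))                      -> INSN  */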
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
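
/* Example (illustrative only; PAT is hypothetical): code that is
   currently emitting into a nested sequence but needs one insn on the
   function's real, outer-level chain can bracket the emission:

     push_topmost_sequence ();
     emit_insn (pat);
     pop_topmost_sequence ();

   The insn lands at the end of the outer chain, and the nested
   sequence is current again afterwards.  */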
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
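
/* Example (illustrative only; DST, SRC and WHERE are hypothetical):
   the canonical pattern for building insns off to the side and
   splicing them in later.  Note that get_insns is called before
   end_sequence, as documented above:

     rtx seq;

     start_sequence ();
     emit_move_insn (dst, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, where);  */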
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
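
/* Example (illustrative only; INSN and AFTER are hypothetical): to
   re-emit an existing insn's pattern elsewhere:

     rtx copy = emit_insn_after (copy_insn (PATTERN (insn)), after);

   Unlike plain copy_rtx, a SCRATCH that appears twice in the original
   pattern maps to a single new SCRATCH in the copy, and the operand
   vectors of an ASM_OPERANDS stay shared between the SETs of a
   PARALLEL, preserving the invariants recog expects.  */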
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */

rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
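
/* Example (illustrative only; assumes the target supports V4SImode): a
   vector whose elements are all const0_rtx folds to the shared zero
   vector rather than a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
		 == CONST0_RTX (V4SImode));  */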
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
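
  /* A consequence of the sharing above (illustrative note, not in the
     original file): CONST_INTs in the saved range are unique objects,
     so they can be compared by pointer.  For example:

       gcc_assert (GEN_INT (0) == const0_rtx);
       if (x == const1_rtx)
	 return true;

     is the idiomatic cheap test, with no need to inspect INTVAL.  */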
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  /* Halve 1.0 by decrementing its binary exponent.  */
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }
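
  /* Worked example (illustrative only): in a signed accumulator mode
     with 15 fractional bits (GET_MODE_FBIT == 15), the lshift_double
     call above computes 1 << 15 == 0x8000, i.e. fixed-point 1.0 is the
     raw integer 32768: value = raw / 2^FBIT = 32768 / 32768 = 1.0.  */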
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
/* Produce an exact duplicate of insn INSN after AFTER,
   taking care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
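
/* Example (illustrative only; FROM, TO and AFTER are hypothetical): a
   duplication pass can clone a run of insns while preserving call and
   frame-related flags:

     for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
       if (INSN_P (insn))
	 after = emit_copy_of_insn_after (insn, after);  */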
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
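
/* The table above memoizes one CLOBBER rtx per (mode, hard register)
   pair; since the rtx is never modified after creation, every caller
   can share it.  Illustrative use (PAT and FLAGS_REGNUM are
   hypothetical):

     XVECEXP (pat, 0, 1) = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);

   The GTY ((deletable)) marker tells the garbage collector it may
   discard the whole cache at any collection point, since entries are
   simply regenerated on demand.  */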
#include "gt-emit-rtl.h"