/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
39 #include "coretypes.h"
41 #include "diagnostic-core.h"
49 #include "hard-reg-set.h"
51 #include "insn-config.h"
54 #include "basic-block.h"
57 #include "langhooks.h"
58 #include "tree-pass.h"
62 #include "tree-flow.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top-level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;
/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}
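/* Illustrative note, not part of the original file: because mem_attrs
   records are hash-consed through mem_attrs_htab above, two MEMs that
   carry identical attributes end up sharing a single mem_attrs record,
   and a MEM whose attributes match its mode's defaults carries none at
   all (MEM_ATTRS == 0), which keeps comparisons and memory use cheap.  */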
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
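/* Illustrative example, not part of the original file: gen_int_mode
   truncates the constant to MODE before sharing, so with 8-bit QImode

     gen_int_mode (0x1ff, QImode)

   yields the canonical (const_int -1) -- 0x1ff keeps only its low 8 bits,
   which sign-extend from bit 7 -- rather than an out-of-range
   (const_int 511).  CONST_INTs within [-MAX_SAVED_CONST_INT,
   MAX_SAVED_CONST_INT] come from the const_int_rtx array; all others are
   hash-consed by gen_rtx_CONST_INT above.  */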
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
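/* Illustrative example, not part of the original file: on a host with
   64-bit HOST_WIDE_INT,

     immed_double_const (-1, -1, DImode)   => (const_int -1)
       (case 1: DImode fits in one HOST_WIDE_INT, so gen_int_mode is used)

     immed_double_const (0, 1, TImode)     => a VOIDmode CONST_DOUBLE
       (case 3: the value 2^64 does not fit in one HOST_WIDE_INT)

   and the resulting CONST_DOUBLE is hash-consed via lookup_const_double.  */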
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
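/* Illustrative example, not part of the original file: because the
   well-known registers are shared, outside of reload

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds as pointer equality.  This is what lets frame pointer
   elimination tell explicit references to these registers apart from
   pseudos that merely happened to be assigned to them.  */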
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
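/* Illustrative examples, not part of the original file, assuming a
   32-bit target where word_mode is SImode and UNITS_PER_WORD is 4:

     (subreg:SI (reg:DF) 0)  -- allowed by the word_mode escape hatch
     (subreg:DI (reg:DF) 0)  -- allowed: float modes may not change size,
                                and DImode and DFmode are both 8 bytes
     (subreg:HI (reg:SF) 0)  -- rejected: HImode (2 bytes) would change
                                the size of a float mode value  */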
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
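/* Illustrative example, not part of the original file:

     byte_lowpart_offset (QImode, SImode)

   is 0 on a little-endian target and 3 on a big-endian one, since the
   least significant byte of a 4-byte value sits last in big-endian
   memory order.  For the paradoxical direction,
   byte_lowpart_offset (SImode, QImode) is 0 and -3 respectively.  */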
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
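/* Illustrative example, not part of the original file: with
   generating_concat_p set, requesting a complex pseudo such as

     gen_reg_rtx (DCmode)

   produces (concat:DC (reg:DF) (reg:DF)) built from two independent
   DFmode pseudos, so the register allocator can place the real and
   imaginary parts separately.  */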
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}
/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
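/* Illustrative example, not part of the original file: for
   subreg_lowpart_offset (SImode, DImode) the size difference is 4, so
   with 4-byte words the offset is

     little-endian:                0
     big-endian words and bytes:   (4 / 4) * 4 + 4 % 4 = 4

   i.e. the low SImode part of a DImode value is its second word on a
   fully big-endian target.  */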
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
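/* Illustrative example, not part of the original file:
   (subreg:DI (reg:SI) 0) is paradoxical -- the outer mode is wider than
   the inner register -- while (subreg:SI (reg:DI) 0) is an ordinary
   lowpart and makes this predicate return false.  */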
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
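/* Illustrative example, not part of the original file: with 4-byte
   words, operand_subword (op, 1, 1, DImode) on a DImode MEM returns a
   word_mode MEM at byte offset 4 (via adjust_address_nv); on a DImode
   REG or constant it falls through to simplify_gen_subreg, producing
   e.g. (subreg:SI (reg:DI) 4) on a little-endian target.  */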
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          get_object_alignment (MEM_EXPR (mem), align))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
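/* Illustrative example, not part of the original file: if MEM_EXPR is a
   decl with DECL_ALIGN of 64 bits and MEM_OFFSET is 6, then

     get_mem_align_offset (mem, 32)

   returns 6 & (32/8 - 1) == 2, i.e. the address is 2 bytes past a
   32-bit boundary.  It returns -1 whenever the underlying object's
   alignment is not known to reach ALIGN.  */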
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
          && (DECL_P (TREE_OPERAND (op0, 0))
              || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
        {
          if (DECL_P (TREE_OPERAND (op0, 0)))
            attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
          else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
            {
              attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
              attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
                                                attrs.align);
#endif
            }
          if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
            {
              unsigned HOST_WIDE_INT ioff
                = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
              unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
              attrs.align = MIN (aoff, attrs.align);
            }
        }
      else
        /* ??? This isn't fully correct, we can't set the alignment from the
           type in all cases.  */
        attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
    }

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base))
          && !TREE_THIS_VOLATILE (base))
        MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
            {
              attrs.size_known_p = true;
              attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
            }
          else
            attrs.size_known_p = false;
          attrs.align = DECL_ALIGN (t);
          align_computed = true;
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          attrs.align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
#endif
          align_computed = true;
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  attrs.align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
                    attrs.align = aoff;
                  align_computed = true;
                  attrs.offset_known_p = true;
                  attrs.offset = ioff;
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (host_integerp (off_tree, 1))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_low_cst (off_tree, 1);
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      if (!align_computed && !INDIRECT_REF_P (t))
        {
          unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
          attrs.align = MAX (attrs.align, obj_align);
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = TYPE_ADDR_SPACE (type);
  set_mem_attrs (ref, &attrs);

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF's base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs, *defattrs;
  unsigned HOST_WIDE_INT max_align;

  attrs = *get_mem_attrs (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = targetm.addr_space.address_mode (attrs.addrspace);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
              < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    attrs.offset += offset;

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  /* We can compute the size in a number of ways.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  if (defattrs->size_known_p)
    {
      attrs.size_known_p = true;
      attrs.size = defattrs->size;
    }
  else if (attrs.size_known_p)
    attrs.size -= offset;

  set_mem_attrs (new_rtx, &attrs);

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}
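
/* An illustrative sketch, assuming the adjust_address wrapper macro from
   expr.h (which expands to adjust_address_1 with VALIDATE and ADJUST both
   1): reading the second word of a DImode MEM on a 32-bit target is

	rtx word1 = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The offset both rewrites the address and, as above, updates the recorded
   offset and alignment attributes.  */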
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = targetm.addr_space.address_mode (attrs.addrspace);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! host_integerp (offset, 1))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_low_cst (offset, 1);
          attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
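
/* An illustrative sketch: a target that cannot issue QImode loads might
   widen the access and mask afterwards, roughly

	rtx wide = widen_memory_access (mem, SImode, 0);

   followed by an extraction of the wanted byte.  The attribute zapping
   above is what keeps the wider load honest with respect to aliasing.  */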
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
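
/* An illustrative sketch: a register allocator that has just carved out
   a stack slot would do something like

	rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	set_mem_attrs_for_spill (slot);

   after which every spill slot shares the single MEM_EXPR and alias set
   built by get_spill_slot_decl above.  */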
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}
static unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing               /* todo_flags_finish */
 }
};
/* Check that ORIG is not marked when it should not be, and mark ORIG as
   in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
         value.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside a single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (GET_CODE (PATTERN (p)) == SEQUENCE)
          {
            int i;
            rtx q, sequence = PATTERN (p);

            for (i = 0; i < XVECLEN (sequence, 0); i++)
              {
                q = XVECEXP (sequence, 0, i);
                gcc_assert (INSN_P (q));
                reset_used_flags (PATTERN (q));
                reset_used_flags (REG_NOTES (q));
              }
          }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        verify_rtx_sharing (PATTERN (p), p);
        verify_rtx_sharing (REG_NOTES (p), p);
      }

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
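
/* Note on use: passes rarely call the verifier by hand.  Setting

	TODO_verify_rtl_sharing

   in a pass's todo_flags_finish (as pass_unshare_all_rtl does above)
   makes the pass manager run verify_rtl_sharing after the pass body in
   checking-enabled builds.  */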
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
         value.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
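
/* An illustrative sketch of the marking protocol used by the unsharing
   machinery above: clear the marks, re-mark everything that must stay
   shared, then copy whatever is reached while already marked:

	reset_used_flags (PATTERN (insn));
	set_used_flags (DECL_RTL (decl));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   This is the dance performed by unshare_all_rtl_again.  */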
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0, 0);
        }
    }

  return insn;
}
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0,
                            XVECLEN (PATTERN (insn), 0) - 1);
        }
    }

  return insn;
}
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
next_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return LABEL
   itself if it is null or any return rtx.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  if (label && ANY_RETURN_P (label))
    return label;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
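
/* An illustrative sketch: a typical forward walk that skips notes and
   debug insns uses the helpers above instead of raw NEXT_INSN, e.g.

	for (insn = get_insns (); insn;
	     insn = next_nonnote_nondebug_insn (insn))
	  process (insn);

   where process stands for whatever per-insn work the caller does.  */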
#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx next, *p;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
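
/* An illustrative sketch: the typical caller is split_all_insns, which
   for each candidate insn simply does

	try_split (PATTERN (insn), insn, 1);

   If no define_split in the machine description matches, TRIAL is
   returned unchanged.  */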
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = get_last_insn ();
  NEXT_INSN (insn) = 0;

  if (NULL != get_last_insn ())
    NEXT_INSN (get_last_insn ()) = insn;

  if (NULL == get_insns ())
    set_first_insn (insn);

  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (get_last_insn () == after)
    set_last_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from BEFORE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (get_insns () == before)
    set_first_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (NONDEBUG_INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
#ifdef ENABLE_CHECKING
  rtx x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx (*make_raw) (rtx))
{
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
static rtx
emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
                          rtx (*make_raw)(rtx))
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}
/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx
emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
                           rtx (*make_raw) (rtx))
{
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx
emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
                    rtx (*make_raw) (rtx))
{
  rtx prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
                            rtx (*make_raw) (rtx))
{
  rtx first = PREV_INSN (before);
  rtx last = emit_pattern_before_noloc (pattern, before,
                                        insnp ? before : NULL_RTX,
                                        NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
                     bool insnp, rtx (*make_raw) (rtx))
{
  rtx next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_locator according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_locator according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
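
/* An illustrative sketch: the classic use is attaching a REG_EQUAL note
   to the last insn of a multi-insn expansion, e.g.

	set_unique_reg_note (get_last_insn (), REG_EQUAL,
			     gen_rtx_MULT (mode, op0, op1));

   so that later passes may replace the whole computation by its known
   value.  */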
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
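
/* Usage sketch (hypothetical): emit picks the insn kind from the
   pattern alone, so a caller holding a bare pattern can write

       emit (gen_rtx_SET (VOIDmode, pc_rtx,
			  gen_rtx_LABEL_REF (VOIDmode, label)));

   and obtain a JUMP_INSN, followed by a barrier since the jump is
   unconditional, where a (set (reg) ...) pattern would have produced
   a plain INSN.  */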
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
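
/* Usage sketch (hypothetical caller; `temp', `src' and `where' are
   placeholders): the canonical pairing with end_sequence detaches the
   emitted insns so they can be inserted elsewhere in the chain:

       rtx seq;

       start_sequence ();
       emit_move_insn (temp, src);
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, where);

   Note that get_insns must be called before end_sequence, as
   documented below.  */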
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
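
/* Usage sketch (hypothetical; `pattern_rtx' is a placeholder): to emit
   an insn into the function's outermost chain while nested inside
   other sequences:

       push_topmost_sequence ();
       emit_insn (pattern_rtx);
       pop_topmost_sequence ();  */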
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
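
/* Usage sketch (hypothetical): duplicating an insn's pattern for
   re-emission elsewhere, much as emit_copy_of_insn_after does below:

       rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);

   Unlike copy_rtx, SCRATCHes shared within the pattern remain shared
   in the copy, and ASM_OPERANDS input vectors are copied only once.  */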
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
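
/* Usage sketch (hypothetical, for a target providing V4SImode):
   filling a vector with zeros collapses to the shared constant:

       rtvec v = rtvec_alloc (4);
       for (i = 0; i < 4; i++)
	 RTVEC_ELT (v, i) = const0_rtx;
       x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here x is CONST0_RTX (V4SImode), not a freshly allocated
   CONST_VECTOR.  */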
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
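
/* Usage sketch (hypothetical, in the style of the block duplication
   passes; `prev' is a placeholder):

       new_insn = emit_copy_of_insn_after (insn, prev);

   copies INSN immediately after PREV, carrying along call flags,
   frame-relatedness, and all REG_NOTES except REG_LABEL_OPERAND.  */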
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
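
/* Usage sketch (hypothetical; REGNO is a placeholder): a target
   expander that must clobber a fixed hard register can share one rtx
   across all uses:

       emit_insn (gen_hard_reg_clobber (word_mode, REGNO));

   Repeated calls with the same mode and register return the cached
   CLOBBER rtx rather than allocating a new one.  */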
#include "gt-emit-rtl.h"