/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with the length attribute nested in top-level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
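
/* rtl.h layers the familiar shared constants on top of this array:
   const0_rtx, for instance, is const_int_rtx[MAX_SAVED_CONST_INT] and
   const1_rtx is the entry just after it.  */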
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace,
	       enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with the given DECL and OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
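
/* Because every CONST_INT of a given value is the single rtx produced
   here, the rest of the compiler may (and does) compare CONST_INTs with
   pointer equality rather than rtx_equal_p.  */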
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
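
/* Example: gen_int_mode (255, QImode) yields (const_int -1), since
   trunc_int_for_mode sign-extends from the QImode sign bit to keep the
   value in the canonical sign-extended form CONST_INT requires.  */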
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
	of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 are the same),
	then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
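
/* A worked instance of case 2 above, assuming a 64-bit HOST_WIDE_INT:
   immed_double_const (5, 0, TImode) yields (const_int 5), because I1 is
   merely the sign extension of I0, while immed_double_const (0, 1, TImode)
   needs the second word and so yields a VOIDmode CONST_DOUBLE.  */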
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
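
/* A consequence of the sharing above: for most of compilation a request
   for (reg:Pmode FRAME_POINTER_REGNUM) hands back frame_pointer_rtx
   itself, so passes may legitimately test for pointer equality with the
   global rtxen instead of comparing REGNO and mode.  */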
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
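
/* Examples, assuming a 32-bit target with 4-byte word_mode:
   (subreg:SI (reg:DF) 0) is accepted only via the word_mode escape hatch
   above; (subreg:QI (reg:DF) 0) trips the float-size rule; and a
   paradoxical subreg such as (subreg:DI (reg:SI) 0) must have offset 0.  */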
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
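
/* For example, with 4-byte SImode inside 8-byte DImode,
   byte_lowpart_offset (SImode, DImode) is 0 on little-endian targets and
   4 on big-endian ones; the paradoxical direction
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */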
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
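
/* Since the tables above double in size each time they fill up, the
   amortized cost of allocating a pseudo here stays constant no matter
   how many registers a function ends up using.  */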
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
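
/* Worked example with 4-byte words: subreg_lowpart_offset (QImode, DImode)
   has difference == 7, so a big-endian target computes
   (7 / 4) * 4 + 7 % 4 == 7 (the last byte) while a little-endian target
   returns 0.  */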
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
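
/* E.g. (subreg:DI (reg:SI 100) 0) is paradoxical: the outer mode is wider
   than the inner one, and the bits beyond SImode generally have no
   defined value.  */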
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
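
/* Example: for a DImode value on a 32-bit target, OFFSET 0 names the low
   word and OFFSET 1 the high word when !WORDS_BIG_ENDIAN; with
   WORDS_BIG_ENDIAN the numbering is reversed.  */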
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPR expressions can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (MAX (MEM_ALIGN (mem),
		  get_object_alignment (MEM_EXPR (mem), align))
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;

	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias;
  tree expr = NULL;
  rtx offset = NULL_RTX;
  rtx size = NULL_RTX;
  unsigned int align = BITS_PER_UNIT;
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;
  /* Default values from pre-existing memory attributes if present.  */
  if (MEM_ATTRS (ref))
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      expr = MEM_EXPR (ref);
      offset = MEM_OFFSET (ref);
      size = MEM_SIZE (ref);
      align = MEM_ALIGN (ref);
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else if (GET_MODE (ref) != BLKmode)
    {
      /* Respect mode size.  */
      size = GEN_INT (GET_MODE_SIZE (GET_MODE (ref)));
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (STRICT_ALIGNMENT && TYPE_P (t))
	align = GET_MODE_ALIGNMENT (GET_MODE (ref));
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }
  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      align = MIN (aoff, align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	align = MAX (align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	  && !TREE_THIS_VOLATILE (base))
	MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}
      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      /* If this is an indirect reference, record it.
	 ??? The MEM_REF half of the test below is unreachable, since the
	 arm above already catches plain MEM_REFs; only TARGET_MEM_REF
	 actually gets here.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }
  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
		     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Like set_mem_attributes_minus_bitpos, but with an outstanding
   bit position of zero.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem),
				   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem),
		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
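
/* Note that none of the set_mem_* setters above modify an existing
   mem_attrs record in place: the records are hash-consed and shared, so
   each setter assembles a new attribute tuple and installs the canonical
   copy that get_mem_attrs returns.  */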
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
		     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}
1993 /* Return a memory reference like MEMREF, but with its mode changed
1994 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1995 nonzero, the memory address is forced to be valid.
1996 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1997 and caller is responsible for adjusting MEMREF base register. */
2000 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2001 int validate, int adjust)
2003 rtx addr = XEXP (memref, 0);
2005 rtx memoffset = MEM_OFFSET (memref);
2007 unsigned int memalign = MEM_ALIGN (memref);
2008 addr_space_t as = MEM_ADDR_SPACE (memref);
2009 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2012 /* If there are no changes, just return the original memory reference. */
2013 if (mode == GET_MODE (memref) && !offset
2014 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2017 /* ??? Prefer to create garbage instead of creating shared rtl.
2018 This may happen even if offset is nonzero -- consider
2019 (plus (plus reg reg) const_int) -- so do this always. */
2020 addr = copy_rtx (addr);
2022 /* Convert a possibly large offset to a signed value within the
2023 range of the target address space. */
2024 pbits = GET_MODE_BITSIZE (address_mode);
2025 if (HOST_BITS_PER_WIDE_INT > pbits)
2027 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2028 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2034 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2035 object, we can merge it into the LO_SUM. */
2036 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2038 && (unsigned HOST_WIDE_INT) offset
2039 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2040 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2041 plus_constant (XEXP (addr, 1), offset));
2043 addr = plus_constant (addr, offset);
2046 new_rtx = change_address_1 (memref, mode, addr, validate);
2048 /* If the address is a REG, change_address_1 rightfully returns memref,
2049 but this would destroy memref's MEM_ATTRS. */
2050 if (new_rtx == memref && offset != 0)
2051 new_rtx = copy_rtx (new_rtx);
2053 /* Compute the new values of the memory attributes due to this adjustment.
2054 We add the offsets and update the alignment. */
2056 memoffset = GEN_INT (offset + INTVAL (memoffset));
2058 /* Compute the new alignment by taking the MIN of the alignment and the
2059 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2064 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2066 /* We can compute the size in a number of ways. */
2067 if (GET_MODE (new_rtx) != BLKmode)
2068 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2069 else if (MEM_SIZE (memref))
2070 size = plus_constant (MEM_SIZE (memref), -offset);
2072 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2073 memoffset, size, memalign, as,
2074 GET_MODE (new_rtx));
2076 /* At some point, we should validate that this offset is within the object,
2077 if all the appropriate values are known. */
2079 return new_rtx;
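/* Illustration only (not part of the original file): a minimal sketch of how
   callers typically use the public adjust_address wrapper, which is
   adjust_address_1 with VALIDATE and ADJUST both nonzero.  MEM is assumed to
   be an SImode memory reference on a little-endian target. */

static rtx
example_extract_low_byte (rtx mem)
{
  /* Narrow the reference to QImode at byte offset 0; the MEM_ATTRS are
     updated as described above. */
  return adjust_address (mem, QImode, 0);
}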
2081 /* Return a memory reference like MEMREF, but with its mode changed
2082 to MODE and its address changed to ADDR, which is assumed to be
2083 MEMREF offset by OFFSET bytes. If VALIDATE is
2084 nonzero, the memory address is forced to be valid. */
2087 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2088 HOST_WIDE_INT offset, int validate)
2090 memref = change_address_1 (memref, VOIDmode, addr, validate);
2091 return adjust_address_1 (memref, mode, offset, validate, 0);
2094 /* Return a memory reference like MEMREF, but whose address is changed by
2095 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2096 known to be in OFFSET (possibly 1). */
2099 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2101 rtx new_rtx, addr = XEXP (memref, 0);
2102 addr_space_t as = MEM_ADDR_SPACE (memref);
2103 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2105 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2107 /* At this point we don't know _why_ the address is invalid. It
2108 could have secondary memory references, multiplies or anything.
2110 However, if we did go and rearrange things, we can wind up not
2111 being able to recognize the magic around pic_offset_table_rtx.
2112 This stuff is fragile, and is yet another example of why it is
2113 bad to expose PIC machinery too early. */
2114 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2115 && GET_CODE (addr) == PLUS
2116 && XEXP (addr, 0) == pic_offset_table_rtx)
2118 addr = force_reg (GET_MODE (addr), addr);
2119 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2122 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2123 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2125 /* If there are no changes, just return the original memory reference. */
2126 if (new_rtx == memref)
2127 return new_rtx;
2129 /* Update the alignment to reflect the offset. Reset the offset, which
2130 we don't know. */
2131 MEM_ATTRS (new_rtx)
2132 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2133 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2134 as, GET_MODE (new_rtx));
2136 return new_rtx;
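/* Illustration only: a hedged sketch of using offset_address for a variable
   array index.  BASE is a MEM for element 0; IDX_BYTES is a pseudo holding a
   byte offset known to be a multiple of 4, which is what POW2 == 4 asserts
   about the resulting alignment. */

static rtx
example_index_word_array (rtx base, rtx idx_bytes)
{
  return offset_address (base, idx_bytes, 4);
}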
2138 /* Return a memory reference like MEMREF, but with its address changed to
2139 ADDR. The caller is asserting that the actual piece of memory pointed
2140 to is the same, just the form of the address is being changed, such as
2141 by putting something into a register. */
2144 replace_equiv_address (rtx memref, rtx addr)
2146 /* change_address_1 copies the memory attribute structure without change
2147 and that's exactly what we want here. */
2148 update_temp_slot_address (XEXP (memref, 0), addr);
2149 return change_address_1 (memref, VOIDmode, addr, 1);
2152 /* Likewise, but the reference is not required to be valid. */
2155 replace_equiv_address_nv (rtx memref, rtx addr)
2157 return change_address_1 (memref, VOIDmode, addr, 0);
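/* Illustration only: the classic use of the two entry points above - copying
   a (possibly invalid) address into a register without changing which piece
   of memory is referenced.  copy_addr_to_reg is the usual helper from
   explow.c. */

static rtx
example_force_address_to_reg (rtx mem)
{
  rtx reg = copy_addr_to_reg (XEXP (mem, 0));
  return replace_equiv_address (mem, reg);
}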
2160 /* Return a memory reference like MEMREF, but with its mode widened to
2161 MODE and offset by OFFSET. This would be used by targets that e.g.
2162 cannot issue QImode memory operations and have to use SImode memory
2163 operations plus masking logic. */
2166 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2168 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2169 tree expr = MEM_EXPR (new_rtx);
2170 rtx memoffset = MEM_OFFSET (new_rtx);
2171 unsigned int size = GET_MODE_SIZE (mode);
2173 /* If there are no changes, just return the original memory reference. */
2174 if (new_rtx == memref)
2177 /* If we don't know what offset we were at within the expression, then
2178 we can't know if we've overstepped the bounds. */
2184 if (TREE_CODE (expr) == COMPONENT_REF)
2186 tree field = TREE_OPERAND (expr, 1);
2187 tree offset = component_ref_field_offset (expr);
2189 if (! DECL_SIZE_UNIT (field))
2195 /* Is the field at least as large as the access? If so, ok,
2196 otherwise strip back to the containing structure. */
2197 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2198 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2199 && INTVAL (memoffset) >= 0)
2200 break;
2202 if (! host_integerp (offset, 1))
2203 {
2204 expr = NULL_TREE;
2205 break;
2206 }
2208 expr = TREE_OPERAND (expr, 0);
2209 memoffset
2210 = (GEN_INT (INTVAL (memoffset)
2211 + tree_low_cst (offset, 1)
2212 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2213 / BITS_PER_UNIT)));
2215 /* Similarly for the decl. */
2216 else if (DECL_P (expr)
2217 && DECL_SIZE_UNIT (expr)
2218 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2219 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2220 && (! memoffset || INTVAL (memoffset) >= 0))
2224 /* The widened memory access overflows the expression, which means
2225 that it could alias another expression. Zap it. */
2232 memoffset = NULL_RTX;
2234 /* The widened memory may alias other stuff, so zap the alias set. */
2235 /* ??? Maybe use get_alias_set on any remaining expression. */
2237 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2238 MEM_ALIGN (new_rtx),
2239 MEM_ADDR_SPACE (new_rtx), mode);
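/* Illustration only: a hedged sketch of the widening pattern described above
   for a target without QImode loads.  The caller loads the containing SImode
   word and is then responsible for the masking/shifting logic. */

static rtx
example_widen_byte_access (rtx byte_mem)
{
  /* Re-reference the enclosing word; offset 0 keeps the same address. */
  return widen_memory_access (byte_mem, SImode, 0);
}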
2244 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2245 static GTY(()) tree spill_slot_decl;
2248 get_spill_slot_decl (bool force_build_p)
2250 tree d = spill_slot_decl;
2253 if (d || !force_build_p)
2254 return d;
2256 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2257 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2258 DECL_ARTIFICIAL (d) = 1;
2259 DECL_IGNORED_P (d) = 1;
2261 spill_slot_decl = d;
2263 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2264 MEM_NOTRAP_P (rd) = 1;
2265 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2266 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2267 SET_DECL_RTL (d, rd);
2269 return d;
2272 /* Given MEM, a result from assign_stack_local, fill in the memory
2273 attributes as appropriate for a register allocator spill slot.
2274 These slots are not aliasable by other memory. We arrange for
2275 them all to use a single MEM_EXPR, so that the aliasing code can
2276 work properly in the case of shared spill slots. */
2279 set_mem_attrs_for_spill (rtx mem)
2281 alias_set_type alias;
2285 expr = get_spill_slot_decl (true);
2286 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2288 /* We expect the incoming memory to be of the form:
2289 (mem:MODE (plus (reg sfp) (const_int offset)))
2290 with perhaps the plus missing for offset = 0. */
2291 addr = XEXP (mem, 0);
2292 offset = const0_rtx;
2293 if (GET_CODE (addr) == PLUS
2294 && CONST_INT_P (XEXP (addr, 1)))
2295 offset = XEXP (addr, 1);
2297 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2298 MEM_SIZE (mem), MEM_ALIGN (mem),
2299 ADDR_SPACE_GENERIC, GET_MODE (mem));
2300 MEM_NOTRAP_P (mem) = 1;
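/* Illustration only: how allocator-side code might combine
   assign_stack_local with the helper above when carving out a spill slot
   for MODE. */

static rtx
example_make_spill_slot (enum machine_mode mode)
{
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  set_mem_attrs_for_spill (slot);
  return slot;
}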
2303 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2306 gen_label_rtx (void)
2308 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2309 NULL, label_num++, NULL);
2312 /* For procedure integration. */
2314 /* Install new pointers to the first and last insns in the chain.
2315 Also, set cur_insn_uid to one higher than the last in use.
2316 Used for an inline-procedure after copying the insn chain. */
2319 set_new_first_and_last_insn (rtx first, rtx last)
2323 set_first_insn (first);
2324 set_last_insn (last);
2327 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2329 int debug_count = 0;
2331 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2332 cur_debug_insn_uid = 0;
2334 for (insn = first; insn; insn = NEXT_INSN (insn))
2335 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2336 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2337 else
2338 {
2339 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2340 if (DEBUG_INSN_P (insn))
2341 debug_count++;
2342 }
2344 if (debug_count)
2345 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2346 else
2347 cur_debug_insn_uid++;
2350 for (insn = first; insn; insn = NEXT_INSN (insn))
2351 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2356 /* Go through all the RTL insn bodies and copy any invalid shared
2357 structure. This routine should only be called once. */
2360 unshare_all_rtl_1 (rtx insn)
2362 /* Unshare just about everything else. */
2363 unshare_all_rtl_in_chain (insn);
2365 /* Make sure the addresses of stack slots found outside the insn chain
2366 (such as, in DECL_RTL of a variable) are not shared
2367 with the insn chain.
2369 This special care is necessary when the stack slot MEM does not
2370 actually appear in the insn chain. If it does appear, its address
2371 is unshared from all else at that point. */
2372 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2375 /* Go through all the RTL insn bodies and copy any invalid shared
2376 structure, again. This is a fairly expensive thing to do so it
2377 should be done sparingly. */
2380 unshare_all_rtl_again (rtx insn)
2385 for (p = insn; p; p = NEXT_INSN (p))
2388 reset_used_flags (PATTERN (p));
2389 reset_used_flags (REG_NOTES (p));
2392 /* Make sure that virtual stack slots are not shared. */
2393 set_used_decls (DECL_INITIAL (cfun->decl));
2395 /* Make sure that virtual parameters are not shared. */
2396 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2397 set_used_flags (DECL_RTL (decl));
2399 reset_used_flags (stack_slot_list);
2401 unshare_all_rtl_1 (insn);
2404 static unsigned int
2405 unshare_all_rtl (void)
2406 {
2407 unshare_all_rtl_1 (get_insns ());
2408 return 0;
2409 }
2411 struct rtl_opt_pass pass_unshare_all_rtl =
2412 {
2413 {
2414 RTL_PASS,
2415 "unshare", /* name */
2416 NULL, /* gate */
2417 unshare_all_rtl, /* execute */
2418 NULL, /* sub */
2419 NULL, /* next */
2420 0, /* static_pass_number */
2421 TV_NONE, /* tv_id */
2422 0, /* properties_required */
2423 0, /* properties_provided */
2424 0, /* properties_destroyed */
2425 0, /* todo_flags_start */
2426 TODO_verify_rtl_sharing /* todo_flags_finish */
2427 }
2428 };
2431 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2432 Recursively does the same for subexpressions. */
2435 verify_rtx_sharing (rtx orig, rtx insn)
2440 const char *format_ptr;
2445 code = GET_CODE (x);
2447 /* These types may be freely shared. */
2466 /* SCRATCHes must be shared because they represent distinct values. */
2468 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2473 if (shared_const_p (orig))
2478 /* A MEM is allowed to be shared if its address is constant. */
2479 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2480 || reload_completed || reload_in_progress)
2489 /* This rtx may not be shared. If it has already been seen,
2490 replace it with a copy of itself. */
2491 #ifdef ENABLE_CHECKING
2492 if (RTX_FLAG (x, used))
2493 {
2494 error ("invalid rtl sharing found in the insn");
2495 debug_rtx (insn);
2496 error ("shared rtx");
2497 debug_rtx (x);
2498 internal_error ("internal consistency failure");
2499 }
2500 #else
2501 gcc_assert (!RTX_FLAG (x, used));
2502 #endif
2503 RTX_FLAG (x, used) = 1;
2505 /* Now scan the subexpressions recursively. */
2507 format_ptr = GET_RTX_FORMAT (code);
2509 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2511 switch (*format_ptr++)
2514 verify_rtx_sharing (XEXP (x, i), insn);
2518 if (XVEC (x, i) != NULL)
2521 int len = XVECLEN (x, i);
2523 for (j = 0; j < len; j++)
2525 /* We allow sharing of ASM_OPERANDS inside single instruction. */
2527 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2528 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2529 == ASM_OPERANDS))
2530 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2531 else
2532 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2541 /* Go through all the RTL insn bodies and check that there is no unexpected
2542 sharing in between the subexpressions. */
2545 verify_rtl_sharing (void)
2549 timevar_push (TV_VERIFY_RTL_SHARING);
2551 for (p = get_insns (); p; p = NEXT_INSN (p))
2554 reset_used_flags (PATTERN (p));
2555 reset_used_flags (REG_NOTES (p));
2556 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2559 rtx q, sequence = PATTERN (p);
2561 for (i = 0; i < XVECLEN (sequence, 0); i++)
2563 q = XVECEXP (sequence, 0, i);
2564 gcc_assert (INSN_P (q));
2565 reset_used_flags (PATTERN (q));
2566 reset_used_flags (REG_NOTES (q));
2571 for (p = get_insns (); p; p = NEXT_INSN (p))
2574 verify_rtx_sharing (PATTERN (p), p);
2575 verify_rtx_sharing (REG_NOTES (p), p);
2578 timevar_pop (TV_VERIFY_RTL_SHARING);
2581 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2582 Assumes the mark bits are cleared at entry. */
2585 unshare_all_rtl_in_chain (rtx insn)
2587 for (; insn; insn = NEXT_INSN (insn))
2590 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2591 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2595 /* Go through all virtual stack slots of a function and mark them as
2596 shared. We never replace the DECL_RTLs themselves with a copy,
2597 but expressions mentioned into a DECL_RTL cannot be shared with
2598 expressions in the instruction stream.
2600 Note that reload may convert pseudo registers into memories in-place.
2601 Pseudo registers are always shared, but MEMs never are. Thus if we
2602 reset the used flags on MEMs in the instruction stream, we must set
2603 them again on MEMs that appear in DECL_RTLs. */
2606 set_used_decls (tree blk)
2611 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2612 if (DECL_RTL_SET_P (t))
2613 set_used_flags (DECL_RTL (t));
2615 /* Now process sub-blocks. */
2616 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2617 set_used_decls (t);
2620 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2621 Recursively does the same for subexpressions. Uses
2622 copy_rtx_if_shared_1 to reduce stack space. */
2625 copy_rtx_if_shared (rtx orig)
2627 copy_rtx_if_shared_1 (&orig);
2628 return orig;
2631 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2632 use. Recursively does the same for subexpressions. */
2635 copy_rtx_if_shared_1 (rtx *orig1)
2641 const char *format_ptr;
2645 /* Repeat is used to turn tail-recursion into iteration. */
2652 code = GET_CODE (x);
2654 /* These types may be freely shared. */
2671 /* SCRATCHes must be shared because they represent distinct values. */
2674 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2679 if (shared_const_p (x))
2689 /* The chain of insns is not being copied. */
2696 /* This rtx may not be shared. If it has already been seen,
2697 replace it with a copy of itself. */
2699 if (RTX_FLAG (x, used))
2701 x = shallow_copy_rtx (x);
2704 RTX_FLAG (x, used) = 1;
2706 /* Now scan the subexpressions recursively.
2707 We can store any replaced subexpressions directly into X
2708 since we know X is not shared! Any vectors in X
2709 must be copied if X was copied. */
2711 format_ptr = GET_RTX_FORMAT (code);
2712 length = GET_RTX_LENGTH (code);
2715 for (i = 0; i < length; i++)
2717 switch (*format_ptr++)
2721 copy_rtx_if_shared_1 (last_ptr);
2722 last_ptr = &XEXP (x, i);
2726 if (XVEC (x, i) != NULL)
2729 int len = XVECLEN (x, i);
2731 /* Copy the vector iff I copied the rtx and the length is nonzero. */
2733 if (copied && len > 0)
2734 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2736 /* Call recursively on all inside the vector. */
2737 for (j = 0; j < len; j++)
2740 copy_rtx_if_shared_1 (last_ptr);
2741 last_ptr = &XVECEXP (x, i, j);
2756 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2759 mark_used_flags (rtx x, int flag)
2763 const char *format_ptr;
2766 /* Repeat is used to turn tail-recursion into iteration. */
2771 code = GET_CODE (x);
2773 /* These types may be freely shared so we needn't do any resetting for them. */
2798 /* The chain of insns is not being copied. */
2805 RTX_FLAG (x, used) = flag;
2807 format_ptr = GET_RTX_FORMAT (code);
2808 length = GET_RTX_LENGTH (code);
2810 for (i = 0; i < length; i++)
2812 switch (*format_ptr++)
2820 mark_used_flags (XEXP (x, i), flag);
2824 for (j = 0; j < XVECLEN (x, i); j++)
2825 mark_used_flags (XVECEXP (x, i, j), flag);
2831 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2832 to look for shared sub-parts. */
2835 reset_used_flags (rtx x)
2837 mark_used_flags (x, 0);
2840 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2841 to look for shared sub-parts. */
2844 set_used_flags (rtx x)
2846 mark_used_flags (x, 1);
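/* Illustration only: the mark-and-copy discipline implemented above.  Clear
   the used flags over a structure, then walk it again copying anything that
   is reached twice; this is what unshare_all_rtl_in_chain does per insn. */

static void
example_unshare_one_insn (rtx insn)
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}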
2849 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2850 Return X or the rtx for the pseudo reg the value of X was copied into.
2851 OTHER must be valid as a SET_DEST. */
2854 make_safe_from (rtx x, rtx other)
2857 switch (GET_CODE (other))
2860 other = SUBREG_REG (other);
2862 case STRICT_LOW_PART:
2865 other = XEXP (other, 0);
2874 && GET_CODE (x) != SUBREG)
2876 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2877 || reg_mentioned_p (other, x))))
2879 rtx temp = gen_reg_rtx (GET_MODE (x));
2880 emit_move_insn (temp, x);
2881 return temp;
2884 return x;
2886 /* Emission of insns (adding them to the doubly-linked list). */
2888 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2891 get_last_insn_anywhere (void)
2893 struct sequence_stack *stack;
2894 if (get_last_insn ())
2895 return get_last_insn ();
2896 for (stack = seq_stack; stack; stack = stack->next)
2897 if (stack->last != 0)
2898 return stack->last;
2900 return 0;
2902 /* Return the first nonnote insn emitted in current sequence or current
2903 function. This routine looks inside SEQUENCEs. */
2906 get_first_nonnote_insn (void)
2908 rtx insn = get_insns ();
2913 for (insn = next_insn (insn);
2914 insn && NOTE_P (insn);
2915 insn = next_insn (insn))
2919 if (NONJUMP_INSN_P (insn)
2920 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2921 insn = XVECEXP (PATTERN (insn), 0, 0);
2928 /* Return the last nonnote insn emitted in current sequence or current
2929 function. This routine looks inside SEQUENCEs. */
2932 get_last_nonnote_insn (void)
2934 rtx insn = get_last_insn ();
2939 for (insn = previous_insn (insn);
2940 insn && NOTE_P (insn);
2941 insn = previous_insn (insn))
2945 if (NONJUMP_INSN_P (insn)
2946 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2947 insn = XVECEXP (PATTERN (insn), 0,
2948 XVECLEN (PATTERN (insn), 0) - 1);
2955 /* Return the number of actual (non-debug) insns emitted in this
2959 get_max_insn_count (void)
2961 int n = cur_insn_uid;
2963 /* The table size must be stable across -g, to avoid codegen
2964 differences due to debug insns, and not be affected by
2965 -fmin-insn-uid, to avoid excessive table size and to simplify
2966 debugging of -fcompare-debug failures. */
2967 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2968 n -= cur_debug_insn_uid;
2969 else
2970 n -= MIN_NONDEBUG_INSN_UID;
2972 return n;
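/* Worked example (illustration only): with MIN_NONDEBUG_INSN_UID == 20,
   debug insns draw UIDs from [0, 20) first, so while cur_debug_insn_uid is
   at most 20 the nondebug UIDs occupy [20, cur_insn_uid) and the count is
   cur_insn_uid - 20.  Once debug UIDs overflow (cur_debug_insn_uid > 20),
   each overflowed debug insn consumed a UID from cur_insn_uid as well, so
   subtracting cur_debug_insn_uid yields the same stable count. */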
2976 /* Return the next insn. If it is a SEQUENCE, return the first insn
2980 next_insn (rtx insn)
2984 insn = NEXT_INSN (insn);
2985 if (insn && NONJUMP_INSN_P (insn)
2986 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2987 insn = XVECEXP (PATTERN (insn), 0, 0);
2993 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2997 previous_insn (rtx insn)
3001 insn = PREV_INSN (insn);
3002 if (insn && NONJUMP_INSN_P (insn)
3003 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3004 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3010 /* Return the next insn after INSN that is not a NOTE. This routine does not
3011 look inside SEQUENCEs. */
3014 next_nonnote_insn (rtx insn)
3018 insn = NEXT_INSN (insn);
3019 if (insn == 0 || !NOTE_P (insn))
3026 /* Return the next insn after INSN that is not a NOTE, but stop the
3027 search before we enter another basic block. This routine does not
3028 look inside SEQUENCEs. */
3031 next_nonnote_insn_bb (rtx insn)
3035 insn = NEXT_INSN (insn);
3036 if (insn == 0 || !NOTE_P (insn))
3038 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3045 /* Return the previous insn before INSN that is not a NOTE. This routine does
3046 not look inside SEQUENCEs. */
3049 prev_nonnote_insn (rtx insn)
3053 insn = PREV_INSN (insn);
3054 if (insn == 0 || !NOTE_P (insn))
3061 /* Return the previous insn before INSN that is not a NOTE, but stop
3062 the search before we enter another basic block. This routine does
3063 not look inside SEQUENCEs. */
3066 prev_nonnote_insn_bb (rtx insn)
3070 insn = PREV_INSN (insn);
3071 if (insn == 0 || !NOTE_P (insn))
3073 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3080 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3081 routine does not look inside SEQUENCEs. */
3084 next_nondebug_insn (rtx insn)
3088 insn = NEXT_INSN (insn);
3089 if (insn == 0 || !DEBUG_INSN_P (insn))
3096 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3097 This routine does not look inside SEQUENCEs. */
3100 prev_nondebug_insn (rtx insn)
3104 insn = PREV_INSN (insn);
3105 if (insn == 0 || !DEBUG_INSN_P (insn))
3112 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3113 This routine does not look inside SEQUENCEs. */
3116 next_nonnote_nondebug_insn (rtx insn)
3120 insn = NEXT_INSN (insn);
3121 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3128 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3129 This routine does not look inside SEQUENCEs. */
3132 prev_nonnote_nondebug_insn (rtx insn)
3136 insn = PREV_INSN (insn);
3137 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3144 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3145 or 0, if there is none. This routine does not look inside
3149 next_real_insn (rtx insn)
3153 insn = NEXT_INSN (insn);
3154 if (insn == 0 || INSN_P (insn))
3161 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3162 or 0, if there is none. This routine does not look inside
3166 prev_real_insn (rtx insn)
3170 insn = PREV_INSN (insn);
3171 if (insn == 0 || INSN_P (insn))
3178 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3179 This routine does not look inside SEQUENCEs. */
3182 last_call_insn (void)
3186 for (insn = get_last_insn ();
3187 insn && !CALL_P (insn);
3188 insn = PREV_INSN (insn))
3194 /* Find the next insn after INSN that really does something. This routine
3195 does not look inside SEQUENCEs. After reload this also skips over
3196 standalone USE and CLOBBER insns. */
3199 active_insn_p (const_rtx insn)
3201 return (CALL_P (insn) || JUMP_P (insn)
3202 || (NONJUMP_INSN_P (insn)
3203 && (! reload_completed
3204 || (GET_CODE (PATTERN (insn)) != USE
3205 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3209 next_active_insn (rtx insn)
3213 insn = NEXT_INSN (insn);
3214 if (insn == 0 || active_insn_p (insn))
3221 /* Find the last insn before INSN that really does something. This routine
3222 does not look inside SEQUENCEs. After reload this also skips over
3223 standalone USE and CLOBBER insns. */
3226 prev_active_insn (rtx insn)
3230 insn = PREV_INSN (insn);
3231 if (insn == 0 || active_insn_p (insn))
3238 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3241 next_label (rtx insn)
3245 insn = NEXT_INSN (insn);
3246 if (insn == 0 || LABEL_P (insn))
3253 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3256 prev_label (rtx insn)
3260 insn = PREV_INSN (insn);
3261 if (insn == 0 || LABEL_P (insn))
3268 /* Return the last label to mark the same position as LABEL. Return null
3269 if LABEL itself is null. */
3272 skip_consecutive_labels (rtx label)
3276 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3284 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3285 and REG_CC_USER notes so we can find it. */
3288 link_cc0_insns (rtx insn)
3290 rtx user = next_nonnote_insn (insn);
3292 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3293 user = XVECEXP (PATTERN (user), 0, 0);
3295 add_reg_note (user, REG_CC_SETTER, insn);
3296 add_reg_note (insn, REG_CC_USER, user);
3299 /* Return the next insn that uses CC0 after INSN, which is assumed to
3300 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3301 applied to the result of this function should yield INSN).
3303 Normally, this is simply the next insn. However, if a REG_CC_USER note
3304 is present, it contains the insn that uses CC0.
3306 Return 0 if we can't find the insn. */
3309 next_cc0_user (rtx insn)
3311 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3314 return XEXP (note, 0);
3316 insn = next_nonnote_insn (insn);
3317 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3318 insn = XVECEXP (PATTERN (insn), 0, 0);
3320 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3326 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3327 note, it is the previous insn. */
3330 prev_cc0_setter (rtx insn)
3332 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3335 return XEXP (note, 0);
3337 insn = prev_nonnote_insn (insn);
3338 gcc_assert (sets_cc0_p (PATTERN (insn)));
3345 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3348 find_auto_inc (rtx *xp, void *data)
3351 rtx reg = (rtx) data;
3353 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3356 switch (GET_CODE (x))
3364 if (rtx_equal_p (reg, XEXP (x, 0)))
3375 /* Increment the label uses for all labels present in rtx. */
3378 mark_label_nuses (rtx x)
3384 code = GET_CODE (x);
3385 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3386 LABEL_NUSES (XEXP (x, 0))++;
3388 fmt = GET_RTX_FORMAT (code);
3389 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3392 mark_label_nuses (XEXP (x, i));
3393 else if (fmt[i] == 'E')
3394 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3395 mark_label_nuses (XVECEXP (x, i, j));
3400 /* Try splitting insns that can be split for better scheduling.
3401 PAT is the pattern which might split.
3402 TRIAL is the insn providing PAT.
3403 LAST is nonzero if we should return the last insn of the sequence produced.
3405 If this routine succeeds in splitting, it returns the first or last
3406 replacement insn depending on the value of LAST. Otherwise, it
3407 returns TRIAL. If the insn to be returned can be split, it will be. */
3410 try_split (rtx pat, rtx trial, int last)
3412 rtx before = PREV_INSN (trial);
3413 rtx after = NEXT_INSN (trial);
3414 int has_barrier = 0;
3417 rtx insn_last, insn;
3420 /* We're not good at redistributing frame information. */
3421 if (RTX_FRAME_RELATED_P (trial))
3424 if (any_condjump_p (trial)
3425 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3426 split_branch_probability = INTVAL (XEXP (note, 0));
3427 probability = split_branch_probability;
3429 seq = split_insns (pat, trial);
3431 split_branch_probability = -1;
3433 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3434 We may need to handle this specially. */
3435 if (after && BARRIER_P (after))
3436 {
3437 has_barrier = 1;
3438 after = NEXT_INSN (after);
3439 }
3444 /* Avoid infinite loop if any insn of the result matches
3445 the original pattern. */
3449 if (INSN_P (insn_last)
3450 && rtx_equal_p (PATTERN (insn_last), pat))
3451 return trial;
3452 if (!NEXT_INSN (insn_last))
3453 break;
3454 insn_last = NEXT_INSN (insn_last);
3457 /* We will be adding the new sequence to the function. The splitters
3458 may have introduced invalid RTL sharing, so unshare the sequence now. */
3459 unshare_all_rtl_in_chain (seq);
3462 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3466 mark_jump_label (PATTERN (insn), insn, 0);
3468 if (probability != -1
3469 && any_condjump_p (insn)
3470 && !find_reg_note (insn, REG_BR_PROB, 0))
3472 /* We can preserve the REG_BR_PROB notes only if exactly
3473 one jump is created, otherwise the machine description
3474 is responsible for this step using
3475 split_branch_probability variable. */
3476 gcc_assert (njumps == 1);
3477 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3482 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3483 in SEQ and copy any additional information across. */
3486 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3491 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3492 target may have explicitly specified. */
3493 p = &CALL_INSN_FUNCTION_USAGE (insn);
3494 while (*p)
3495 p = &XEXP (*p, 1);
3496 *p = CALL_INSN_FUNCTION_USAGE (trial);
3498 /* If the old call was a sibling call, the new one must
3499 be too. */
3500 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3502 /* If the new call is the last instruction in the sequence,
3503 it will effectively replace the old call in-situ. Otherwise
3504 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3505 so that it comes immediately after the new call. */
3506 if (NEXT_INSN (insn))
3507 for (next = NEXT_INSN (trial);
3508 next && NOTE_P (next);
3509 next = NEXT_INSN (next))
3510 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3511 {
3512 remove_insn (next);
3513 add_insn_after (next, insn, NULL);
3514 break;
3515 }
3519 /* Copy notes, particularly those related to the CFG. */
3520 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3522 switch (REG_NOTE_KIND (note))
3525 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3530 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3533 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3537 case REG_NON_LOCAL_GOTO:
3538 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3541 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3547 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3549 rtx reg = XEXP (note, 0);
3550 if (!FIND_REG_INC_NOTE (insn, reg)
3551 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3552 add_reg_note (insn, REG_INC, reg);
3562 /* If there are LABELS inside the split insns increment the
3563 usage count so we don't delete the label. */
3564 if (JUMP_P (trial))
3565 {
3566 insn = insn_last;
3567 while (insn != NULL_RTX)
3569 /* JUMP_P insns have already been "marked" above. */
3570 if (NONJUMP_INSN_P (insn))
3571 mark_label_nuses (PATTERN (insn));
3573 insn = PREV_INSN (insn);
3577 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3579 delete_insn (trial);
3580 if (has_barrier)
3581 emit_barrier_after (tem);
3583 /* Recursively call try_split for each new insn created; by the
3584 time control returns here that insn will be fully split, so
3585 set LAST and continue from the insn after the one returned.
3586 We can't use next_active_insn here since AFTER may be a note.
3587 Ignore deleted insns, which can occur if not optimizing. */
3588 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3589 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3590 tem = try_split (PATTERN (tem), tem, 1);
3592 /* Return either the first or the last insn, depending on which was
3593 requested. */
3594 return last
3595 ? (after ? PREV_INSN (after) : get_last_insn ())
3596 : NEXT_INSN (before);
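/* Illustration only: how a caller typically drives try_split; this mirrors
   what the splitting passes in recog.c do for each insn.  Returns the last
   insn of the replacement sequence, or INSN itself if nothing was split. */

static rtx
example_split_insn (rtx insn)
{
  return try_split (PATTERN (insn), insn, 1);
}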
3599 /* Make and return an INSN rtx, initializing all its slots.
3600 Store PATTERN in the pattern slots. */
3603 make_insn_raw (rtx pattern)
3607 insn = rtx_alloc (INSN);
3609 INSN_UID (insn) = cur_insn_uid++;
3610 PATTERN (insn) = pattern;
3611 INSN_CODE (insn) = -1;
3612 REG_NOTES (insn) = NULL;
3613 INSN_LOCATOR (insn) = curr_insn_locator ();
3614 BLOCK_FOR_INSN (insn) = NULL;
3616 #ifdef ENABLE_RTL_CHECKING
3617 if (insn
3618 && INSN_P (insn)
3619 && (returnjump_p (insn)
3620 || (GET_CODE (insn) == SET
3621 && SET_DEST (insn) == pc_rtx)))
3622 {
3623 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3624 debug_rtx (insn);
3625 }
3626 #endif
3628 return insn;
3631 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3634 make_debug_insn_raw (rtx pattern)
3638 insn = rtx_alloc (DEBUG_INSN);
3639 INSN_UID (insn) = cur_debug_insn_uid++;
3640 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3641 INSN_UID (insn) = cur_insn_uid++;
3643 PATTERN (insn) = pattern;
3644 INSN_CODE (insn) = -1;
3645 REG_NOTES (insn) = NULL;
3646 INSN_LOCATOR (insn) = curr_insn_locator ();
3647 BLOCK_FOR_INSN (insn) = NULL;
3652 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3655 make_jump_insn_raw (rtx pattern)
3659 insn = rtx_alloc (JUMP_INSN);
3660 INSN_UID (insn) = cur_insn_uid++;
3662 PATTERN (insn) = pattern;
3663 INSN_CODE (insn) = -1;
3664 REG_NOTES (insn) = NULL;
3665 JUMP_LABEL (insn) = NULL;
3666 INSN_LOCATOR (insn) = curr_insn_locator ();
3667 BLOCK_FOR_INSN (insn) = NULL;
3672 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3675 make_call_insn_raw (rtx pattern)
3679 insn = rtx_alloc (CALL_INSN);
3680 INSN_UID (insn) = cur_insn_uid++;
3682 PATTERN (insn) = pattern;
3683 INSN_CODE (insn) = -1;
3684 REG_NOTES (insn) = NULL;
3685 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3686 INSN_LOCATOR (insn) = curr_insn_locator ();
3687 BLOCK_FOR_INSN (insn) = NULL;
3692 /* Add INSN to the end of the doubly-linked list.
3693 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3695 void
3696 add_insn (rtx insn)
3697 {
3698 PREV_INSN (insn) = get_last_insn ();
3699 NEXT_INSN (insn) = 0;
3701 if (NULL != get_last_insn())
3702 NEXT_INSN (get_last_insn ()) = insn;
3704 if (NULL == get_insns ())
3705 set_first_insn (insn);
3707 set_last_insn (insn);
3710 /* Add INSN into the doubly-linked list after insn AFTER. This and
3711 the next should be the only functions called to insert an insn once
3712 delay slots have been filled since only they know how to update a
3716 add_insn_after (rtx insn, rtx after, basic_block bb)
3718 rtx next = NEXT_INSN (after);
3720 gcc_assert (!optimize || !INSN_DELETED_P (after));
3722 NEXT_INSN (insn) = next;
3723 PREV_INSN (insn) = after;
3727 PREV_INSN (next) = insn;
3728 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3729 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3731 else if (get_last_insn () == after)
3732 set_last_insn (insn);
3735 struct sequence_stack *stack = seq_stack;
3736 /* Scan all pending sequences too. */
3737 for (; stack; stack = stack->next)
3738 if (after == stack->last)
3747 if (!BARRIER_P (after)
3748 && !BARRIER_P (insn)
3749 && (bb = BLOCK_FOR_INSN (after)))
3751 set_block_for_insn (insn, bb);
3753 df_insn_rescan (insn);
3754 /* Should not happen as first in the BB is always
3755 either NOTE or LABEL. */
3756 if (BB_END (bb) == after
3757 /* Avoid clobbering of structure when creating new BB. */
3758 && !BARRIER_P (insn)
3759 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3760 BB_END (bb) = insn;
3763 NEXT_INSN (after) = insn;
3764 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3766 rtx sequence = PATTERN (after);
3767 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3771 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3772 the previous should be the only functions called to insert an insn
3773 once delay slots have been filled since only they know how to
3774 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3778 add_insn_before (rtx insn, rtx before, basic_block bb)
3780 rtx prev = PREV_INSN (before);
3782 gcc_assert (!optimize || !INSN_DELETED_P (before));
3784 PREV_INSN (insn) = prev;
3785 NEXT_INSN (insn) = before;
3789 NEXT_INSN (prev) = insn;
3790 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3792 rtx sequence = PATTERN (prev);
3793 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3796 else if (get_insns () == before)
3797 set_first_insn (insn);
3800 struct sequence_stack *stack = seq_stack;
3801 /* Scan all pending sequences too. */
3802 for (; stack; stack = stack->next)
3803 if (before == stack->first)
3805 stack->first = insn;
3813 && !BARRIER_P (before)
3814 && !BARRIER_P (insn))
3815 bb = BLOCK_FOR_INSN (before);
3819 set_block_for_insn (insn, bb);
3821 df_insn_rescan (insn);
3822 /* Should not happen as first in the BB is always either NOTE or LABEL. */
3824 gcc_assert (BB_HEAD (bb) != insn
3825 /* Avoid clobbering of structure when creating new BB. */
3827 || NOTE_INSN_BASIC_BLOCK_P (insn));
3830 PREV_INSN (before) = insn;
3831 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3832 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3836 /* Replace insn with a deleted instruction note. */
3839 set_insn_deleted (rtx insn)
3841 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3842 PUT_CODE (insn, NOTE);
3843 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3847 /* Remove an insn from its doubly-linked list. This function knows how
3848 to handle sequences. */
3850 remove_insn (rtx insn)
3852 rtx next = NEXT_INSN (insn);
3853 rtx prev = PREV_INSN (insn);
3856 /* Later in the code, the block will be marked dirty. */
3857 df_insn_delete (NULL, INSN_UID (insn));
3861 NEXT_INSN (prev) = next;
3862 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3864 rtx sequence = PATTERN (prev);
3865 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3868 else if (get_insns () == insn)
3871 PREV_INSN (next) = NULL;
3872 set_first_insn (next);
3876 struct sequence_stack *stack = seq_stack;
3877 /* Scan all pending sequences too. */
3878 for (; stack; stack = stack->next)
3879 if (insn == stack->first)
3881 stack->first = next;
3890 PREV_INSN (next) = prev;
3891 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3892 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3894 else if (get_last_insn () == insn)
3895 set_last_insn (prev);
3898 struct sequence_stack *stack = seq_stack;
3899 /* Scan all pending sequences too. */
3900 for (; stack; stack = stack->next)
3901 if (insn == stack->last)
3909 if (!BARRIER_P (insn)
3910 && (bb = BLOCK_FOR_INSN (insn)))
3912 if (NONDEBUG_INSN_P (insn))
3913 df_set_bb_dirty (bb);
3914 if (BB_HEAD (bb) == insn)
3916 /* Never ever delete the basic block note without deleting whole basic block. */
3918 gcc_assert (!NOTE_P (insn));
3919 BB_HEAD (bb) = next;
3921 if (BB_END (bb) == insn)
3922 BB_END (bb) = prev;
3926 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3929 add_function_usage_to (rtx call_insn, rtx call_fusage)
3931 gcc_assert (call_insn && CALL_P (call_insn));
3933 /* Put the register usage information on the CALL. If there is already
3934 some usage information, put ours at the end. */
3935 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3939 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3940 link = XEXP (link, 1))
3943 XEXP (link, 1) = call_fusage;
3946 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3949 /* Delete all insns made since FROM.
3950 FROM becomes the new last instruction. */
3953 delete_insns_since (rtx from)
3958 NEXT_INSN (from) = 0;
3959 set_last_insn (from);
3962 /* This function is deprecated, please use sequences instead.
3964 Move a consecutive bunch of insns to a different place in the chain.
3965 The insns to be moved are those between FROM and TO.
3966 They are moved to a new position after the insn AFTER.
3967 AFTER must not be FROM or TO or any insn in between.
3969 This function does not know about SEQUENCEs and hence should not be
3970 called after delay-slot filling has been done. */
3973 reorder_insns_nobb (rtx from, rtx to, rtx after)
3975 #ifdef ENABLE_CHECKING
3977 for (x = from; x != to; x = NEXT_INSN (x))
3978 gcc_assert (after != x);
3979 gcc_assert (after != to);
3982 /* Splice this bunch out of where it is now. */
3983 if (PREV_INSN (from))
3984 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3986 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3987 if (get_last_insn () == to)
3988 set_last_insn (PREV_INSN (from));
3989 if (get_insns () == from)
3990 set_first_insn (NEXT_INSN (to));
3992 /* Make the new neighbors point to it and it to them. */
3993 if (NEXT_INSN (after))
3994 PREV_INSN (NEXT_INSN (after)) = to;
3996 NEXT_INSN (to) = NEXT_INSN (after);
3997 PREV_INSN (from) = after;
3998 NEXT_INSN (after) = from;
3999 if (after == get_last_insn ())
4000 set_last_insn (to);
4003 /* Same as function above, but take care to update BB boundaries. */
4005 reorder_insns (rtx from, rtx to, rtx after)
4007 rtx prev = PREV_INSN (from);
4008 basic_block bb, bb2;
4010 reorder_insns_nobb (from, to, after);
4012 if (!BARRIER_P (after)
4013 && (bb = BLOCK_FOR_INSN (after)))
4016 df_set_bb_dirty (bb);
4018 if (!BARRIER_P (from)
4019 && (bb2 = BLOCK_FOR_INSN (from)))
4021 if (BB_END (bb2) == to)
4022 BB_END (bb2) = prev;
4023 df_set_bb_dirty (bb2);
4026 if (BB_END (bb) == after)
4027 BB_END (bb) = to;
4029 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4031 df_insn_change_bb (x, bb);
4036 /* Emit insn(s) of given code and pattern
4037 at a specified place within the doubly-linked list.
4039 All of the emit_foo global entry points accept an object
4040 X which is either an insn list or a PATTERN of a single
4041 instruction.
4043 There are thus a few canonical ways to generate code and
4044 emit it at a specific place in the instruction stream. For
4045 example, consider the instruction named SPOT and the fact that
4046 we would like to emit some instructions before SPOT. We might
4047 do it like this:
4049 start_sequence ();
4050 ... emit the new instructions ...
4051 insns_head = get_insns ();
4052 end_sequence ();
4054 emit_insn_before (insns_head, SPOT);
4056 It used to be common to generate SEQUENCE rtl instead, but that
4057 is a relic of the past which no longer occurs. The reason is that
4058 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4059 generated would almost certainly die right after it was created. */
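/* Illustration only (not part of the original file): the canonical pattern
   from the comment above, spelled out as a compilable helper that emits a
   single register-to-register copy before SPOT. */

static void
example_emit_copy_before (rtx dest, rtx src, rtx spot)
{
  rtx insns_head;

  start_sequence ();
  emit_move_insn (dest, src);	/* ... emit the new instructions ... */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}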
4062 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4063 rtx (*make_raw) (rtx))
4067 gcc_assert (before);
4072 switch (GET_CODE (x))
4084 rtx next = NEXT_INSN (insn);
4085 add_insn_before (insn, before, bb);
4091 #ifdef ENABLE_RTL_CHECKING
4098 last = (*make_raw) (x);
4099 add_insn_before (last, before, bb);
4106 /* Make X be output before the instruction BEFORE. */
4109 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4111 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4114 /* Make an instruction with body X and code JUMP_INSN
4115 and output it before the instruction BEFORE. */
4118 emit_jump_insn_before_noloc (rtx x, rtx before)
4120 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4121 make_jump_insn_raw);
4124 /* Make an instruction with body X and code CALL_INSN
4125 and output it before the instruction BEFORE. */
4128 emit_call_insn_before_noloc (rtx x, rtx before)
4130 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4131 make_call_insn_raw);
4134 /* Make an instruction with body X and code DEBUG_INSN
4135 and output it before the instruction BEFORE. */
4138 emit_debug_insn_before_noloc (rtx x, rtx before)
4140 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4141 make_debug_insn_raw);
4144 /* Make an insn of code BARRIER
4145 and output it before the insn BEFORE. */
4148 emit_barrier_before (rtx before)
4150 rtx insn = rtx_alloc (BARRIER);
4152 INSN_UID (insn) = cur_insn_uid++;
4154 add_insn_before (insn, before, NULL);
4158 /* Emit the label LABEL before the insn BEFORE. */
4161 emit_label_before (rtx label, rtx before)
4163 /* This can be called twice for the same label as a result of the
4164 confusion that follows a syntax error! So make it harmless. */
4165 if (INSN_UID (label) == 0)
4167 INSN_UID (label) = cur_insn_uid++;
4168 add_insn_before (label, before, NULL);
4174 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4177 emit_note_before (enum insn_note subtype, rtx before)
4179 rtx note = rtx_alloc (NOTE);
4180 INSN_UID (note) = cur_insn_uid++;
4181 NOTE_KIND (note) = subtype;
4182 BLOCK_FOR_INSN (note) = NULL;
4183 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4185 add_insn_before (note, before, NULL);
4189 /* Helper for emit_insn_after, handles lists of instructions
4193 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4197 if (!bb && !BARRIER_P (after))
4198 bb = BLOCK_FOR_INSN (after);
4202 df_set_bb_dirty (bb);
4203 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4204 if (!BARRIER_P (last))
4206 set_block_for_insn (last, bb);
4207 df_insn_rescan (last);
4209 if (!BARRIER_P (last))
4211 set_block_for_insn (last, bb);
4212 df_insn_rescan (last);
4214 if (BB_END (bb) == after)
4218 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4221 after_after = NEXT_INSN (after);
4223 NEXT_INSN (after) = first;
4224 PREV_INSN (first) = after;
4225 NEXT_INSN (last) = after_after;
4227 PREV_INSN (after_after) = last;
4229 if (after == get_last_insn())
4230 set_last_insn (last);
4236 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4237 rtx (*make_raw)(rtx))
4246 switch (GET_CODE (x))
4255 last = emit_insn_after_1 (x, after, bb);
4258 #ifdef ENABLE_RTL_CHECKING
4265 last = (*make_raw) (x);
4266 add_insn_after (last, after, bb);
4273 /* Make X be output after the insn AFTER and set the BB of insn. If
4274 BB is NULL, an attempt is made to infer the BB from AFTER. */
4277 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4279 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4283 /* Make an insn of code JUMP_INSN with body X
4284 and output it after the insn AFTER. */
4287 emit_jump_insn_after_noloc (rtx x, rtx after)
4289 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4292 /* Make an instruction with body X and code CALL_INSN
4293 and output it after the instruction AFTER. */
4296 emit_call_insn_after_noloc (rtx x, rtx after)
4298 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4301 /* Make an instruction with body X and code DEBUG_INSN
4302 and output it after the instruction AFTER. */
4305 emit_debug_insn_after_noloc (rtx x, rtx after)
4307 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4310 /* Make an insn of code BARRIER
4311 and output it after the insn AFTER. */
4314 emit_barrier_after (rtx after)
4316 rtx insn = rtx_alloc (BARRIER);
4318 INSN_UID (insn) = cur_insn_uid++;
4320 add_insn_after (insn, after, NULL);
4324 /* Emit the label LABEL after the insn AFTER. */
4327 emit_label_after (rtx label, rtx after)
4329 /* This can be called twice for the same label
4330 as a result of the confusion that follows a syntax error!
4331 So make it harmless. */
4332 if (INSN_UID (label) == 0)
4334 INSN_UID (label) = cur_insn_uid++;
4335 add_insn_after (label, after, NULL);
4341 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4344 emit_note_after (enum insn_note subtype, rtx after)
4346 rtx note = rtx_alloc (NOTE);
4347 INSN_UID (note) = cur_insn_uid++;
4348 NOTE_KIND (note) = subtype;
4349 BLOCK_FOR_INSN (note) = NULL;
4350 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4351 add_insn_after (note, after, NULL);
4355 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4356 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4359 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4360 rtx (*make_raw) (rtx))
4362 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4364 if (pattern == NULL_RTX || !loc)
4367 after = NEXT_INSN (after);
4370 if (active_insn_p (after) && !INSN_LOCATOR (after))
4371 INSN_LOCATOR (after) = loc;
4374 after = NEXT_INSN (after);
4379 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4380 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4384 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4385 rtx (*make_raw) (rtx))
4389 if (skip_debug_insns)
4390 while (DEBUG_INSN_P (prev))
4391 prev = PREV_INSN (prev);
4394 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4397 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4400 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4402 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4404 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4407 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4409 emit_insn_after (rtx pattern, rtx after)
4411 return emit_pattern_after (pattern, after, true, make_insn_raw);
4414 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4416 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4418 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4421 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4423 emit_jump_insn_after (rtx pattern, rtx after)
4425 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4428 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4430 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4432 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4435 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4437 emit_call_insn_after (rtx pattern, rtx after)
4439 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4442 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4444 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4446 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4449 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4451 emit_debug_insn_after (rtx pattern, rtx after)
4453 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4456 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4457 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4458 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4462 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4463 rtx (*make_raw) (rtx))
4465 rtx first = PREV_INSN (before);
4466 rtx last = emit_pattern_before_noloc (pattern, before,
4467 insnp ? before : NULL_RTX,
4470 if (pattern == NULL_RTX || !loc)
4474 first = get_insns ();
4476 first = NEXT_INSN (first);
4479 if (active_insn_p (first) && !INSN_LOCATOR (first))
4480 INSN_LOCATOR (first) = loc;
4483 first = NEXT_INSN (first);
4488 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4489 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4490 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4491 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4494 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4495 bool insnp, rtx (*make_raw) (rtx))
4499 if (skip_debug_insns)
4500 while (DEBUG_INSN_P (next))
4501 next = PREV_INSN (next);
4504 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4507 return emit_pattern_before_noloc (pattern, before,
4508 insnp ? before : NULL_RTX,
4512 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4514 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4516 return emit_pattern_before_setloc (pattern, before, loc, true,
4520 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4522 emit_insn_before (rtx pattern, rtx before)
4524 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4527 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4529 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4531 return emit_pattern_before_setloc (pattern, before, loc, false,
4532 make_jump_insn_raw);
4535 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4537 emit_jump_insn_before (rtx pattern, rtx before)
4539 return emit_pattern_before (pattern, before, true, false,
4540 make_jump_insn_raw);
4543 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4545 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4547 return emit_pattern_before_setloc (pattern, before, loc, false,
4548 make_call_insn_raw);
4551 /* Like emit_call_insn_before_noloc,
4552 but set insn_locator according to BEFORE. */
4554 emit_call_insn_before (rtx pattern, rtx before)
4556 return emit_pattern_before (pattern, before, true, false,
4557 make_call_insn_raw);
4560 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4562 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4564 return emit_pattern_before_setloc (pattern, before, loc, false,
4565 make_debug_insn_raw);
4568 /* Like emit_debug_insn_before_noloc,
4569 but set insn_locator according to BEFORE. */
4571 emit_debug_insn_before (rtx pattern, rtx before)
4573 return emit_pattern_before (pattern, before, false, false,
4574 make_debug_insn_raw);
4577 /* Take X and emit it at the end of the doubly-linked
4578 INSN list.
4580 Returns the last insn emitted. */
4582 rtx
4583 emit_insn (rtx x)
4584 {
4585 rtx last = get_last_insn ();
4591 switch (GET_CODE (x))
4603 rtx next = NEXT_INSN (insn);
4610 #ifdef ENABLE_RTL_CHECKING
4617 last = make_insn_raw (x);
4625 /* Make an insn of code DEBUG_INSN with pattern X
4626 and add it to the end of the doubly-linked list. */
4629 emit_debug_insn (rtx x)
4631 rtx last = get_last_insn();
4637 switch (GET_CODE (x))
4649 rtx next = NEXT_INSN (insn);
4656 #ifdef ENABLE_RTL_CHECKING
4663 last = make_debug_insn_raw (x);
4671 /* Make an insn of code JUMP_INSN with pattern X
4672 and add it to the end of the doubly-linked list. */
4675 emit_jump_insn (rtx x)
4677 rtx last = NULL_RTX, insn;
4679 switch (GET_CODE (x))
4691 rtx next = NEXT_INSN (insn);
4698 #ifdef ENABLE_RTL_CHECKING
4705 last = make_jump_insn_raw (x);
4713 /* Make an insn of code CALL_INSN with pattern X
4714 and add it to the end of the doubly-linked list. */
4717 emit_call_insn (rtx x)
4721 switch (GET_CODE (x))
4730 insn = emit_insn (x);
4733 #ifdef ENABLE_RTL_CHECKING
4740 insn = make_call_insn_raw (x);
4748 /* Add the label LABEL to the end of the doubly-linked list. */
4751 emit_label (rtx label)
4753 /* This can be called twice for the same label
4754 as a result of the confusion that follows a syntax error!
4755 So make it harmless. */
4756 if (INSN_UID (label) == 0)
4758 INSN_UID (label) = cur_insn_uid++;
4764 /* Make an insn of code BARRIER
4765 and add it to the end of the doubly-linked list. */
4767 rtx
4768 emit_barrier (void)
4769 {
4770 rtx barrier = rtx_alloc (BARRIER);
4771 INSN_UID (barrier) = cur_insn_uid++;
4772 add_insn (barrier);
4773 return barrier;
4774 }
4776 /* Emit a copy of note ORIG. */
4779 emit_note_copy (rtx orig)
4783 note = rtx_alloc (NOTE);
4785 INSN_UID (note) = cur_insn_uid++;
4786 NOTE_DATA (note) = NOTE_DATA (orig);
4787 NOTE_KIND (note) = NOTE_KIND (orig);
4788 BLOCK_FOR_INSN (note) = NULL;
4794 /* Make an insn of code NOTE or type NOTE_NO
4795 and add it to the end of the doubly-linked list. */
4798 emit_note (enum insn_note kind)
4802 note = rtx_alloc (NOTE);
4803 INSN_UID (note) = cur_insn_uid++;
4804 NOTE_KIND (note) = kind;
4805 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4806 BLOCK_FOR_INSN (note) = NULL;
4811 /* Emit a clobber of lvalue X. */
4814 emit_clobber (rtx x)
4816 /* CONCATs should not appear in the insn stream. */
4817 if (GET_CODE (x) == CONCAT)
4819 emit_clobber (XEXP (x, 0));
4820 return emit_clobber (XEXP (x, 1));
4822 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4825 /* Return a sequence of insns to clobber lvalue X. */
4839 /* Emit a use of rvalue X. */
4844 /* CONCATs should not appear in the insn stream. */
4845 if (GET_CODE (x) == CONCAT)
4847 emit_use (XEXP (x, 0));
4848 return emit_use (XEXP (x, 1));
4850 return emit_insn (gen_rtx_USE (VOIDmode, x));
4853 /* Return a sequence of insns to use rvalue X. */
4867 /* Cause next statement to emit a line note even if the line number
4868 has not changed. */
4870 void
4871 force_next_line_note (void)
4872 {
4873 last_location = -1;
4874 }
4876 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4877 note of this type already exists, remove it first. */
4880 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4882 rtx note = find_reg_note (insn, kind, NULL_RTX);
4888 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4889 has multiple sets (some callers assume single_set
4890 means the insn only has one set, when in fact it
4891 means the insn only has one * useful * set). */
4892 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4898 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4899 It serves no useful purpose and breaks eliminate_regs. */
4900 if (GET_CODE (datum) == ASM_OPERANDS)
4905 XEXP (note, 0) = datum;
4906 df_notes_rescan (insn);
4914 XEXP (note, 0) = datum;
4920 add_reg_note (insn, kind, datum);
4926 df_notes_rescan (insn);
4932 return REG_NOTES (insn);
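/* Illustration only: a typical use of set_unique_reg_note - recording the
   constant an insn computes as a REG_EQUAL note, replacing any earlier note
   of the same kind. */

static void
example_record_value (rtx insn, HOST_WIDE_INT value)
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}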
4935 /* Return an indication of which type of insn should have X as a body.
4936 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4938 static enum rtx_code
4939 classify_insn (rtx x)
4943 if (GET_CODE (x) == CALL)
4945 if (GET_CODE (x) == RETURN)
4947 if (GET_CODE (x) == SET)
4949 if (SET_DEST (x) == pc_rtx)
4951 else if (GET_CODE (SET_SRC (x)) == CALL)
4956 if (GET_CODE (x) == PARALLEL)
4959 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4960 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4962 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4963 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4965 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4966 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4972 /* Emit the rtl pattern X as an appropriate kind of insn.
4973 If X is a label, it is simply added into the insn chain. */
4978 enum rtx_code code = classify_insn (x);
4983 return emit_label (x);
4985 return emit_insn (x);
4988 rtx insn = emit_jump_insn (x);
4989 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4990 return emit_barrier ();
4994 return emit_call_insn (x);
4996 return emit_debug_insn (x);
5002 /* Space for free sequence stack entries. */
5003 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

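/* Usage sketch (illustrative; not part of the original source): the
   canonical pattern for building insns off to one side is

       rtx seq;

       start_sequence ();
       emit_move_insn (dest, src);
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, where);

   where `dest', `src' and `where' stand for caller-provided rtxes.
   Note that get_insns must be called before end_sequence.  */
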
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't': case 'w': case 'i': case 's':
      case 'S': case 'u': case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

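/* Usage sketch (illustrative; not part of the original source): unlike
   copy_rtx, copy_insn keeps a SCRATCH that is mentioned both in the
   pattern and in a REG_NOTE pointing at the same copy:

       rtx pat = copy_insn (PATTERN (insn));
       rtx note = copy_insn_1 (XEXP (link, 0));

   where `insn' is a caller-provided insn and `link' one of its
   EXPR_LIST notes; the second call reuses the SCRATCH map set up by
   the first, as described in the comment above copy_insn_1.  */
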
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are one.  */

rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

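/* Usage sketch (illustrative; not part of the original source):
   requesting an all-zero V4SImode vector through this routine yields
   the shared CONST0_RTX (V4SImode) rather than a fresh CONST_VECTOR:

       rtvec v = rtvec_alloc (4);
       int i;

       for (i = 0; i < 4; i++)
	 RTVEC_ELT (v, i) = const0_rtx;
       x = gen_rtx_CONST_VECTOR (V4SImode, v);

   assuming the target provides V4SImode.  */
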
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst0;
  dconstm1.sign = 1;

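  /* 0.5 is 1.0 with its binary exponent decremented: 0.5 = 1.0 * 2**-1.  */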
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}

/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

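/* Usage sketch (illustrative; not part of the original source):
   duplicating an epilogue insn when a basic block is replicated:

       rtx copy = emit_copy_of_insn_after (insn, after);

   The copy keeps frame-relatedness, call flags and most REG_NOTES;
   REG_LABEL_OPERAND notes are recreated by mark_jump_label instead.  */
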
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

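/* Usage sketch (illustrative; not part of the original source): since the
   returned rtx is cached per (mode, regno) pair, callers may request it
   repeatedly without allocating, e.g. when building PARALLELs that
   clobber a condition-code register:

       rtx clob = gen_hard_reg_clobber (CCmode, FLAGS_REG);

   FLAGS_REG stands for a target-specific hard register number (the name
   is from the i386 back end).  */
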
#include "gt-emit-rtl.h"