/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
38 #include "coretypes.h"
48 #include "hard-reg-set.h"
50 #include "insn-config.h"
54 #include "basic-block.h"
57 #include "langhooks.h"
58 #include "tree-pass.h"
60 /* Commonly used modes. */
62 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
63 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
64 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
65 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
68 /* This is *not* reset after each function. It gives each CODE_LABEL
69 in the entire compilation a unique label number. */
71 static GTY(()) int label_num = 1;
73 /* Nonzero means do not generate NOTEs for source line numbers. */
75 static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];
84 /* Commonly used RTL for hard registers. These objects are not necessarily
85 unique, so we allocate them separately from global_rtl. They are
86 initialized once per compilation unit, then copied into regno_reg_rtx
87 at the beginning of each function. */
88 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
90 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
91 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
92 record a copy of const[012]_rtx. */
94 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
98 REAL_VALUE_TYPE dconst0;
99 REAL_VALUE_TYPE dconst1;
100 REAL_VALUE_TYPE dconst2;
101 REAL_VALUE_TYPE dconst3;
102 REAL_VALUE_TYPE dconst10;
103 REAL_VALUE_TYPE dconstm1;
104 REAL_VALUE_TYPE dconstm2;
105 REAL_VALUE_TYPE dconsthalf;
106 REAL_VALUE_TYPE dconstthird;
107 REAL_VALUE_TYPE dconstpi;
108 REAL_VALUE_TYPE dconste;
110 /* All references to the following fixed hard registers go through
111 these unique rtl objects. On machines where the frame-pointer and
112 arg-pointer are the same register, they use the same unique object.
114 After register allocation, other rtl objects which used to be pseudo-regs
115 may be clobbered to refer to the frame-pointer register.
116 But references that were originally to the frame-pointer can be
117 distinguished from the others because they contain frame_pointer_rtx.
119 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
120 tricky: until register elimination has taken place hard_frame_pointer_rtx
121 should be used if it is being set, and frame_pointer_rtx otherwise. After
122 register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx object.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
128 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
129 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
130 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
132 /* This is used to implement __builtin_return_address for some machines.
133 See for instance the MIPS port. */
134 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
143 /* A hash table storing CONST_INTs whose absolute value is greater
144 than MAX_SAVED_CONST_INT. */
146 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
147 htab_t const_int_htab;
149 /* A hash table storing memory attribute structures. */
150 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
151 htab_t mem_attrs_htab;
153 /* A hash table storing register attribute structures. */
154 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
155 htab_t reg_attrs_htab;
157 /* A hash table storing all CONST_DOUBLEs. */
158 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_double_htab;
161 #define first_insn (cfun->emit->x_first_insn)
162 #define last_insn (cfun->emit->x_last_insn)
163 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
164 #define last_location (cfun->emit->x_last_location)
165 #define first_label_num (cfun->emit->x_first_label_num)
167 static rtx make_jump_insn_raw (rtx);
168 static rtx make_call_insn_raw (rtx);
169 static rtx find_line_note (rtx);
170 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
171 static void unshare_all_decls (tree);
172 static void reset_used_decls (tree);
173 static void mark_label_nuses (rtx);
174 static hashval_t const_int_htab_hash (const void *);
175 static int const_int_htab_eq (const void *, const void *);
176 static hashval_t const_double_htab_hash (const void *);
177 static int const_double_htab_eq (const void *, const void *);
178 static rtx lookup_const_double (rtx);
179 static hashval_t mem_attrs_htab_hash (const void *);
180 static int mem_attrs_htab_eq (const void *, const void *);
181 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
183 static hashval_t reg_attrs_htab_hash (const void *);
184 static int reg_attrs_htab_eq (const void *, const void *);
185 static reg_attrs *get_reg_attrs (tree, int);
186 static tree component_ref_for_mem_expr (tree);
187 static rtx gen_const_vector (enum machine_mode, int);
188 static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
192 int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */
197 const_int_htab_hash (const void *x)
199 return (hashval_t) INTVAL ((rtx) x);
202 /* Returns nonzero if the value represented by X (which is really a
203 CONST_INT) is the same as that given by Y (which is really a
207 const_int_htab_eq (const void *x, const void *y)
209 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
212 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
214 const_double_htab_hash (const void *x)
219 if (GET_MODE (value) == VOIDmode)
220 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
223 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
224 /* MODE is used in the comparison, so it should be in the hash. */
225 h ^= GET_MODE (value);
230 /* Returns nonzero if the value represented by X (really a ...)
231 is the same as that represented by Y (really a ...) */
233 const_double_htab_eq (const void *x, const void *y)
235 rtx a = (rtx)x, b = (rtx)y;
237 if (GET_MODE (a) != GET_MODE (b))
239 if (GET_MODE (a) == VOIDmode)
240 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
241 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
243 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
244 CONST_DOUBLE_REAL_VALUE (b));
/* Returns a hash code for X (which is really a mem_attrs *).  */
250 mem_attrs_htab_hash (const void *x)
252 mem_attrs *p = (mem_attrs *) x;
254 return (p->alias ^ (p->align * 1000)
255 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
260 /* Returns nonzero if the value represented by X (which is really a
261 mem_attrs *) is the same as that given by Y (which is also really a
265 mem_attrs_htab_eq (const void *x, const void *y)
267 mem_attrs *p = (mem_attrs *) x;
268 mem_attrs *q = (mem_attrs *) y;
270 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
271 && p->size == q->size && p->align == q->align);
274 /* Allocate a new mem_attrs structure and insert it into the hash table if
275 one identical to it is not already in the table. We are doing this for
279 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
280 unsigned int align, enum machine_mode mode)
285 /* If everything is the default, we can just return zero.
286 This must match what the corresponding MEM_* macros return when the
287 field is not present. */
288 if (alias == 0 && expr == 0 && offset == 0
290 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
291 && (STRICT_ALIGNMENT && mode != BLKmode
292 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
297 attrs.offset = offset;
301 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
304 *slot = ggc_alloc (sizeof (mem_attrs));
305 memcpy (*slot, &attrs, sizeof (mem_attrs));
/* Returns a hash code for X (which is really a reg_attrs *).  */
314 reg_attrs_htab_hash (const void *x)
316 reg_attrs *p = (reg_attrs *) x;
318 return ((p->offset * 1000) ^ (long) p->decl);
321 /* Returns nonzero if the value represented by X (which is really a
322 reg_attrs *) is the same as that given by Y (which is also really a
326 reg_attrs_htab_eq (const void *x, const void *y)
328 reg_attrs *p = (reg_attrs *) x;
329 reg_attrs *q = (reg_attrs *) y;
331 return (p->decl == q->decl && p->offset == q->offset);
333 /* Allocate a new reg_attrs structure and insert it into the hash table if
334 one identical to it is not already in the table. We are doing this for
338 get_reg_attrs (tree decl, int offset)
343 /* If everything is the default, we can just return zero. */
344 if (decl == 0 && offset == 0)
348 attrs.offset = offset;
350 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
353 *slot = ggc_alloc (sizeof (reg_attrs));
354 memcpy (*slot, &attrs, sizeof (reg_attrs));
360 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
361 don't attempt to share with the various global pieces of rtl (such as
362 frame_pointer_rtx). */
365 gen_raw_REG (enum machine_mode mode, int regno)
367 rtx x = gen_rtx_raw_REG (mode, regno);
368 ORIGINAL_REGNO (x) = regno;
372 /* There are some RTL codes that require special attention; the generation
373 functions do the raw handling. If you add to this list, modify
374 special_rtx in gengenrtl.c as well. */
377 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
381 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
382 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
384 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
385 if (const_true_rtx && arg == STORE_FLAG_VALUE)
386 return const_true_rtx;
389 /* Look up the CONST_INT in the hash table. */
390 slot = htab_find_slot_with_hash (const_int_htab, &arg,
391 (hashval_t) arg, INSERT);
393 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
399 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
401 return GEN_INT (trunc_int_for_mode (c, mode));
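/* Illustrative sketch (not part of GCC proper): GEN_INT takes the
   HOST_WIDE_INT value as-is, while gen_int_mode first truncates or
   sign-extends it to MODE via trunc_int_for_mode.  For example, assuming
   QImode is 8 bits wide:

       GEN_INT (0xff)               =>  (const_int 255)
       gen_int_mode (0xff, QImode)  =>  (const_int -1)

   The second form is what most callers want when the value is meant to
   be interpreted in MODE.  */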
404 /* CONST_DOUBLEs might be created from pairs of integers, or from
405 REAL_VALUE_TYPEs. Also, their length is known only at run time,
406 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
408 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
409 hash table. If so, return its counterpart; otherwise add it
410 to the hash table and return it. */
412 lookup_const_double (rtx real)
414 void **slot = htab_find_slot (const_double_htab, real, INSERT);
421 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
422 VALUE in mode MODE. */
424 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
426 rtx real = rtx_alloc (CONST_DOUBLE);
427 PUT_MODE (real, mode);
431 return lookup_const_double (real);
434 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
435 of ints: I0 is the low-order word and I1 is the high-order word.
436 Do not use this routine for non-integer modes; convert to
437 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
440 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
445 if (mode != VOIDmode)
449 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
450 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
451 /* We can get a 0 for an error mark. */
452 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
453 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
455 /* We clear out all bits that don't belong in MODE, unless they and
456 our sign bit are all one. So we get either a reasonable negative
457 value or a reasonable unsigned value for this mode. */
458 width = GET_MODE_BITSIZE (mode);
459 if (width < HOST_BITS_PER_WIDE_INT
460 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
461 != ((HOST_WIDE_INT) (-1) << (width - 1))))
462 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
463 else if (width == HOST_BITS_PER_WIDE_INT
464 && ! (i1 == ~0 && i0 < 0))
467 /* We should be able to represent this value as a constant. */
468 gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
470 /* If this would be an entire word for the target, but is not for
471 the host, then sign-extend on the host so that the number will
472 look the same way on the host that it would on the target.
474 For example, when building a 64 bit alpha hosted 32 bit sparc
475 targeted compiler, then we want the 32 bit unsigned value -1 to be
476 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
477 The latter confuses the sparc backend. */
479 if (width < HOST_BITS_PER_WIDE_INT
480 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
481 i0 |= ((HOST_WIDE_INT) (-1) << width);
  /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
     CONST_INT.

     ??? Strictly speaking, this is wrong if we create a CONST_INT for
     a large unsigned constant with the size of MODE being
     HOST_BITS_PER_WIDE_INT and later try to interpret that constant
     in a wider mode.  In that case we will mis-interpret it as a
     negative number.
492 Unfortunately, the only alternative is to make a CONST_DOUBLE for
493 any constant in any mode if it is an unsigned constant larger
494 than the maximum signed integer in an int on the host. However,
495 doing this will break everyone that always expects to see a
496 CONST_INT for SImode and smaller.
498 We have always been making CONST_INTs in this case, so nothing
499 new is being broken. */
501 if (width <= HOST_BITS_PER_WIDE_INT)
502 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
505 /* If this integer fits in one word, return a CONST_INT. */
506 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
509 /* We use VOIDmode for integers. */
510 value = rtx_alloc (CONST_DOUBLE);
511 PUT_MODE (value, VOIDmode);
513 CONST_DOUBLE_LOW (value) = i0;
514 CONST_DOUBLE_HIGH (value) = i1;
516 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
517 XWINT (value, i) = 0;
519 return lookup_const_double (value);
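/* Illustrative sketch (hypothetical values, not part of GCC proper):
   on a host where HOST_WIDE_INT is 32 bits, a 64-bit DImode constant is
   passed in as two words, low word first:

       immed_double_const (0x89abcdef, 0x01234567, DImode)

   which yields a CONST_DOUBLE with CONST_DOUBLE_LOW == 0x89abcdef and
   CONST_DOUBLE_HIGH == 0x01234567.  When the value fits in a single
   signed HOST_WIDE_INT (I1 is just the sign extension of I0), a plain
   CONST_INT is returned instead.  */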
523 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
525 /* In case the MD file explicitly references the frame pointer, have
526 all such references point to the same frame pointer. This is
527 used during frame pointer elimination to distinguish the explicit
528 references to these registers from pseudos that happened to be
531 If we have eliminated the frame pointer or arg pointer, we will
532 be using it as a normal register, for example as a spill
533 register. In such cases, we might be accessing it in a mode that
534 is not Pmode and therefore cannot use the pre-allocated rtx.
536 Also don't do this when we are making new REGs in reload, since
537 we don't want to get confused with the real pointers. */
539 if (mode == Pmode && !reload_in_progress)
541 if (regno == FRAME_POINTER_REGNUM
542 && (!reload_completed || frame_pointer_needed))
543 return frame_pointer_rtx;
544 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
545 if (regno == HARD_FRAME_POINTER_REGNUM
546 && (!reload_completed || frame_pointer_needed))
547 return hard_frame_pointer_rtx;
549 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
550 if (regno == ARG_POINTER_REGNUM)
551 return arg_pointer_rtx;
553 #ifdef RETURN_ADDRESS_POINTER_REGNUM
554 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
555 return return_address_pointer_rtx;
557 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
558 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
559 return pic_offset_table_rtx;
560 if (regno == STACK_POINTER_REGNUM)
561 return stack_pointer_rtx;
565 /* If the per-function register table has been set up, try to re-use
566 an existing entry in that table to avoid useless generation of RTL.
568 This code is disabled for now until we can fix the various backends
569 which depend on having non-shared hard registers in some cases. Long
570 term we want to re-enable this code as it can significantly cut down
571 on the amount of useless RTL that gets generated.
573 We'll also need to fix some code that runs after reload that wants to
574 set ORIGINAL_REGNO. */
579 && regno < FIRST_PSEUDO_REGISTER
580 && reg_raw_mode[regno] == mode)
581 return regno_reg_rtx[regno];
584 return gen_raw_REG (mode, regno);
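/* Illustrative sketch (not part of GCC proper): because of the sharing
   above, asking for the frame pointer in Pmode before reload yields the
   single shared object:

       gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx

   and likewise for the other special pointer registers listed above;
   any other (mode, regno) pair currently gets a freshly allocated REG.  */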
588 gen_rtx_MEM (enum machine_mode mode, rtx addr)
590 rtx rt = gen_rtx_raw_MEM (mode, addr);
  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it ourselves.  */
599 /* Generate a memory referring to non-trapping constant memory. */
602 gen_const_mem (enum machine_mode mode, rtx addr)
604 rtx mem = gen_rtx_MEM (mode, addr);
605 MEM_READONLY_P (mem) = 1;
606 MEM_NOTRAP_P (mem) = 1;
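/* Illustrative sketch (assuming a hypothetical symbol name, not part of
   GCC proper): a typical use of gen_const_mem is a load from a constant
   pool or GOT entry that is known never to trap or change:

       rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
       rtx mem  = gen_const_mem (SImode, addr);

   Alias analysis and the RTL optimizers may then freely CSE or hoist
   such a reference.  */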
610 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
611 this construct would be valid, and false otherwise. */
614 validate_subreg (enum machine_mode omode, enum machine_mode imode,
615 rtx reg, unsigned int offset)
617 unsigned int isize = GET_MODE_SIZE (imode);
618 unsigned int osize = GET_MODE_SIZE (omode);
620 /* All subregs must be aligned. */
621 if (offset % osize != 0)
624 /* The subreg offset cannot be outside the inner object. */
  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
632 if (omode == word_mode)
634 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
635 is the culprit here, and not the backends. */
636 else if (osize >= UNITS_PER_WORD && isize >= osize)
638 /* Allow component subregs of complex and vector. Though given the below
639 extraction rules, it's not always clear what that means. */
640 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
641 && GET_MODE_INNER (imode) == omode)
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
648 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
650 /* Subregs involving floating point modes are not allowed to
651 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
652 (subreg:SI (reg:DF) 0) isn't. */
653 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
659 /* Paradoxical subregs must have offset zero. */
663 /* This is a normal subreg. Verify that the offset is representable. */
665 /* For hard registers, we already have most of these rules collected in
666 subreg_offset_representable_p. */
667 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
669 unsigned int regno = REGNO (reg);
671 #ifdef CANNOT_CHANGE_MODE_CLASS
672 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
673 && GET_MODE_INNER (imode) == omode)
675 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
679 return subreg_offset_representable_p (regno, imode, offset, omode);
  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
688 if (osize < UNITS_PER_WORD)
690 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
691 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
692 if (offset % UNITS_PER_WORD != low_off)
699 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
701 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
702 return gen_rtx_raw_SUBREG (mode, reg, offset);
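/* Illustrative sketch (not part of GCC proper), applying the rules above
   on a little-endian 32-bit target where word_mode is SImode:

       (subreg:SI (reg:DI pseudo) 0)   -- valid, lowpart of a wider int reg
       (subreg:SI (reg:DI pseudo) 4)   -- valid, selects the other word
       (subreg:SI (reg:DF pseudo) 0)   -- allowed only via the temporary
                                          word_mode exception noted above
       (subreg:QI (reg:DI pseudo) 3)   -- rejected: a subword SUBREG must
                                          be the lowpart of some word.  */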
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
709 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
711 enum machine_mode inmode;
713 inmode = GET_MODE (reg);
714 if (inmode == VOIDmode)
716 return gen_rtx_SUBREG (mode, reg,
717 subreg_lowpart_offset (mode, inmode));
720 /* gen_rtvec (n, [rt1, ..., rtn])
722 ** This routine creates an rtvec and stores within it the
723 ** pointers to rtx's which are its arguments.
728 gen_rtvec (int n, ...)
737 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
739 vector = alloca (n * sizeof (rtx));
741 for (i = 0; i < n; i++)
742 vector[i] = va_arg (p, rtx);
744 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
748 return gen_rtvec_v (save_n, vector);
752 gen_rtvec_v (int n, rtx *argp)
758 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
760 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
762 for (i = 0; i < n; i++)
763 rt_val->elem[i] = *argp++;
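/* Illustrative sketch (not part of GCC proper, SET1 and SET2 are
   hypothetical previously constructed SET rtxs): gen_rtvec is the usual
   way to build the operand vector of a PARALLEL or UNSPEC, e.g.

       rtx par = gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec (2, set1, set2));  */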
768 /* Generate a REG rtx for a new pseudo register of mode MODE.
769 This pseudo is assigned the next sequential register number. */
772 gen_reg_rtx (enum machine_mode mode)
774 struct function *f = cfun;
  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);
781 if (generating_concat_p
782 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
783 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
785 /* For complex modes, don't make a single pseudo.
786 Instead, make a CONCAT of two pseudos.
787 This allows noncontiguous allocation of the real and imaginary parts,
788 which makes much better code. Besides, allocating DCmode
789 pseudos overstrains reload on some machines like the 386. */
790 rtx realpart, imagpart;
791 enum machine_mode partmode = GET_MODE_INNER (mode);
793 realpart = gen_reg_rtx (partmode);
794 imagpart = gen_reg_rtx (partmode);
795 return gen_rtx_CONCAT (mode, realpart, imagpart);
798 /* Make sure regno_pointer_align, and regno_reg_rtx are large
799 enough to have an element for this pseudo reg number. */
801 if (reg_rtx_no == f->emit->regno_pointer_align_length)
803 int old_size = f->emit->regno_pointer_align_length;
807 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
808 memset (new + old_size, 0, old_size);
809 f->emit->regno_pointer_align = (unsigned char *) new;
811 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
812 old_size * 2 * sizeof (rtx));
813 memset (new1 + old_size, 0, old_size * sizeof (rtx));
814 regno_reg_rtx = new1;
816 f->emit->regno_pointer_align_length = old_size * 2;
819 val = gen_raw_REG (mode, reg_rtx_no);
820 regno_reg_rtx[reg_rtx_no++] = val;
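/* Illustrative sketch (not part of GCC proper; register numbers are
   hypothetical): each call returns a fresh pseudo, and complex modes
   become a CONCAT of two pseudos while generating_concat_p is set:

       rtx r1 = gen_reg_rtx (SImode);   =>  (reg:SI 58)
       rtx r2 = gen_reg_rtx (SImode);   =>  (reg:SI 59)
       rtx c  = gen_reg_rtx (SCmode);   =>  (concat:SC (reg:SF 60)
						       (reg:SF 61))  */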
824 /* Generate a register with same attributes as REG, but offsetted by OFFSET.
825 Do the big endian correction if needed. */
828 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
830 rtx new = gen_rtx_REG (mode, regno);
832 HOST_WIDE_INT var_size;
  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of the variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we would not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.
852 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
854 REG.mode MODE DECL size old offset new offset description
855 DI SI 4 4 0 int32 in SImode
856 DI SI 1 4 0 char in SImode
857 DI QI 1 7 0 char in QImode
858 DI QI 4 5 1 1st element in QImode
860 DI HI 4 6 2 1st element in HImode
     If the size of DECL is equal to or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */
868 decl = REG_EXPR (reg);
869 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
872 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
873 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
878 /* Convert machine endian to little endian WRT size of mode of REG. */
879 if (WORDS_BIG_ENDIAN)
880 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
881 / UNITS_PER_WORD) * UNITS_PER_WORD;
883 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
885 if (BYTES_BIG_ENDIAN)
886 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
889 offset_le += offset % UNITS_PER_WORD;
891 if (offset_le >= var_size)
893 /* MODE is wider than the variable so the new reg will cover
894 the whole variable so the resulting OFFSET should be 0. */
899 /* Convert little endian to machine endian WRT size of variable. */
900 if (WORDS_BIG_ENDIAN)
901 offset = ((var_size - 1 - offset_le)
902 / UNITS_PER_WORD) * UNITS_PER_WORD;
904 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
906 if (BYTES_BIG_ENDIAN)
907 offset += ((var_size - 1 - offset_le)
910 offset += offset_le % UNITS_PER_WORD;
914 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
915 REG_OFFSET (reg) + offset);
/* Set the register attributes for REG from the attributes of MEM.  */
922 set_reg_attrs_from_mem (rtx reg, rtx mem)
924 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
926 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
929 /* Set the register attributes for registers contained in PARM_RTX.
930 Use needed values from memory attributes of MEM. */
933 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
935 if (REG_P (parm_rtx))
936 set_reg_attrs_from_mem (parm_rtx, mem);
937 else if (GET_CODE (parm_rtx) == PARALLEL)
939 /* Check for a NULL entry in the first slot, used to indicate that the
940 parameter goes both on the stack and in registers. */
941 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
942 for (; i < XVECLEN (parm_rtx, 0); i++)
944 rtx x = XVECEXP (parm_rtx, 0, i);
945 if (REG_P (XEXP (x, 0)))
946 REG_ATTRS (XEXP (x, 0))
947 = get_reg_attrs (MEM_EXPR (mem),
948 INTVAL (XEXP (x, 1)));
953 /* Assign the RTX X to declaration T. */
955 set_decl_rtl (tree t, rtx x)
957 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
961 /* For register, we maintain the reverse information too. */
963 REG_ATTRS (x) = get_reg_attrs (t, 0);
964 else if (GET_CODE (x) == SUBREG)
965 REG_ATTRS (SUBREG_REG (x))
966 = get_reg_attrs (t, -SUBREG_BYTE (x));
967 if (GET_CODE (x) == CONCAT)
969 if (REG_P (XEXP (x, 0)))
970 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
971 if (REG_P (XEXP (x, 1)))
972 REG_ATTRS (XEXP (x, 1))
973 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
975 if (GET_CODE (x) == PARALLEL)
978 for (i = 0; i < XVECLEN (x, 0); i++)
980 rtx y = XVECEXP (x, 0, i);
981 if (REG_P (XEXP (y, 0)))
982 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
987 /* Assign the RTX X to parameter declaration T. */
989 set_decl_incoming_rtl (tree t, rtx x)
991 DECL_INCOMING_RTL (t) = x;
995 /* For register, we maintain the reverse information too. */
997 REG_ATTRS (x) = get_reg_attrs (t, 0);
998 else if (GET_CODE (x) == SUBREG)
999 REG_ATTRS (SUBREG_REG (x))
1000 = get_reg_attrs (t, -SUBREG_BYTE (x));
1001 if (GET_CODE (x) == CONCAT)
1003 if (REG_P (XEXP (x, 0)))
1004 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1005 if (REG_P (XEXP (x, 1)))
1006 REG_ATTRS (XEXP (x, 1))
1007 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1009 if (GET_CODE (x) == PARALLEL)
1013 /* Check for a NULL entry, used to indicate that the parameter goes
1014 both on the stack and in registers. */
1015 if (XEXP (XVECEXP (x, 0, 0), 0))
1020 for (i = start; i < XVECLEN (x, 0); i++)
1022 rtx y = XVECEXP (x, 0, i);
1023 if (REG_P (XEXP (y, 0)))
1024 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1029 /* Identify REG (which may be a CONCAT) as a user register. */
1032 mark_user_reg (rtx reg)
1034 if (GET_CODE (reg) == CONCAT)
1036 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1037 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1041 gcc_assert (REG_P (reg));
1042 REG_USERVAR_P (reg) = 1;
1046 /* Identify REG as a probable pointer register and show its alignment
1047 as ALIGN, if nonzero. */
1050 mark_reg_pointer (rtx reg, int align)
1052 if (! REG_POINTER (reg))
1054 REG_POINTER (reg) = 1;
1057 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1059 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
1061 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1064 /* Return 1 plus largest pseudo reg number used in the current function. */
1072 /* Return 1 + the largest label number used so far in the current function. */
1075 max_label_num (void)
1080 /* Return first label number used in this function (if any were used). */
1083 get_first_label_num (void)
1085 return first_label_num;
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */
1093 maybe_set_first_label_num (rtx x)
1095 if (CODE_LABEL_NUMBER (x) < first_label_num)
1096 first_label_num = CODE_LABEL_NUMBER (x);
1099 /* Return a value representing some low-order bits of X, where the number
1100 of low-order bits is given by MODE. Note that no conversion is done
1101 between floating-point and fixed-point values, rather, the bit
1102 representation is returned.
1104 This function handles the cases in common between gen_lowpart, below,
1105 and two variants in cse.c and combine.c. These are the cases that can
1106 be safely handled at all points in the compilation.
1108 If this is not a case we can handle, return 0. */
1111 gen_lowpart_common (enum machine_mode mode, rtx x)
1113 int msize = GET_MODE_SIZE (mode);
1116 enum machine_mode innermode;
1118 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1119 so we have to make one up. Yuk. */
1120 innermode = GET_MODE (x);
1121 if (GET_CODE (x) == CONST_INT
1122 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1123 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1124 else if (innermode == VOIDmode)
1125 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1127 xsize = GET_MODE_SIZE (innermode);
1129 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1131 if (innermode == mode)
1134 /* MODE must occupy no more words than the mode of X. */
1135 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1136 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1139 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1140 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1143 offset = subreg_lowpart_offset (mode, innermode);
1145 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1146 && (GET_MODE_CLASS (mode) == MODE_INT
1147 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1149 /* If we are getting the low-order part of something that has been
1150 sign- or zero-extended, we can either just use the object being
1151 extended or make a narrower extension. If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */
1157 if (GET_MODE (XEXP (x, 0)) == mode)
1159 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1160 return gen_lowpart_common (mode, XEXP (x, 0));
1161 else if (msize < xsize)
1162 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1164 else if (GET_CODE (x) == SUBREG || REG_P (x)
1165 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1166 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1167 return simplify_gen_subreg (mode, x, innermode, offset);
1169 /* Otherwise, we can't do this. */
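/* Illustrative sketch (not part of GCC proper): on a little-endian
   64-bit target,

       gen_lowpart_common (SImode, (reg:DI pseudo))
	 =>  (subreg:SI (reg:DI pseudo) 0)
       gen_lowpart_common (SImode, (sign_extend:DI (reg:SI pseudo)))
	 =>  (reg:SI pseudo)

   i.e. the low part of an extension is just the object being extended.  */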
1174 gen_highpart (enum machine_mode mode, rtx x)
1176 unsigned int msize = GET_MODE_SIZE (mode);
1179 /* This case loses if X is a subreg. To catch bugs early,
1180 complain if an invalid MODE is used even in other cases. */
1181 gcc_assert (msize <= UNITS_PER_WORD
1182 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1184 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1185 subreg_highpart_offset (mode, GET_MODE (x)));
1186 gcc_assert (result);
1188 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1189 the target if we have a MEM. gen_highpart must return a valid operand,
1190 emitting code if necessary to do so. */
1193 result = validize_mem (result);
1194 gcc_assert (result);
1200 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1201 be VOIDmode constant. */
1203 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1205 if (GET_MODE (exp) != VOIDmode)
1207 gcc_assert (GET_MODE (exp) == innermode);
1208 return gen_highpart (outermode, exp);
1210 return simplify_gen_subreg (outermode, exp, innermode,
1211 subreg_highpart_offset (outermode, innermode));
1214 /* Return offset in bytes to get OUTERMODE low part
1215 of the value in mode INNERMODE stored in memory in target format. */
1218 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1220 unsigned int offset = 0;
1221 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1225 if (WORDS_BIG_ENDIAN)
1226 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1227 if (BYTES_BIG_ENDIAN)
1228 offset += difference % UNITS_PER_WORD;
1234 /* Return offset in bytes to get OUTERMODE high part
1235 of the value in mode INNERMODE stored in memory in target format. */
1237 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1239 unsigned int offset = 0;
1240 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1242 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1246 if (! WORDS_BIG_ENDIAN)
1247 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1248 if (! BYTES_BIG_ENDIAN)
1249 offset += difference % UNITS_PER_WORD;
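/* Illustrative sketch (not part of GCC proper) for DImode inner and
   SImode outer (8- and 4-byte modes):

       subreg_lowpart_offset  (SImode, DImode) == 0 on little endian,
						  4 on big endian
       subreg_highpart_offset (SImode, DImode) == 4 on little endian,
						  0 on big endian

   i.e. the least-significant half sits at the lower address only on
   little-endian targets.  */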
1255 /* Return 1 iff X, assumed to be a SUBREG,
1256 refers to the least significant part of its containing reg.
1257 If X is not a SUBREG, always return 1 (it is its own low part!). */
1260 subreg_lowpart_p (rtx x)
1262 if (GET_CODE (x) != SUBREG)
1264 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1267 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1268 == SUBREG_BYTE (x));
1271 /* Return subword OFFSET of operand OP.
1272 The word number, OFFSET, is interpreted as the word number starting
1273 at the low-order address. OFFSET 0 is the low-order word if not
1274 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1276 If we cannot extract the required word, we return zero. Otherwise,
1277 an rtx corresponding to the requested word will be returned.
1279 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1280 reload has completed, a valid address will always be returned. After
1281 reload, if a valid address cannot be returned, we return zero.
1283 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1284 it is the responsibility of the caller.
1286 MODE is the mode of OP in case it is a CONST_INT.
   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */
1297 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1299 if (mode == VOIDmode)
1300 mode = GET_MODE (op);
1302 gcc_assert (mode != VOIDmode);
1304 /* If OP is narrower than a word, fail. */
1306 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1309 /* If we want a word outside OP, return zero. */
1311 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1314 /* Form a new MEM at the requested address. */
1317 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1319 if (! validate_address)
1322 else if (reload_completed)
1324 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1328 return replace_equiv_address (new, XEXP (new, 0));
1331 /* Rest can be handled by simplify_subreg. */
1332 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
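/* Illustrative sketch (not part of GCC proper, OP is a hypothetical
   DImode operand): on a 32-bit target, splitting a DImode value into its
   two SImode words:

       rtx lo = operand_subword (op, 0, 1, DImode);
       rtx hi = operand_subword (op, 1, 1, DImode);

   With !WORDS_BIG_ENDIAN, word 0 is the least significant word; on a
   word-big-endian target it is the most significant one.  */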
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */
1343 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1345 rtx result = operand_subword (op, offset, 1, mode);
1350 if (mode != BLKmode && mode != VOIDmode)
1352 /* If this is a register which can not be accessed by words, copy it
1353 to a pseudo register. */
1355 op = copy_to_reg (op);
1357 op = force_reg (mode, op);
1360 result = operand_subword (op, offset, 1, mode);
1361 gcc_assert (result);
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */
1371 component_ref_for_mem_expr (tree ref)
1373 tree inner = TREE_OPERAND (ref, 0);
1375 if (TREE_CODE (inner) == COMPONENT_REF)
1376 inner = component_ref_for_mem_expr (inner);
1379 /* Now remove any conversions: they don't change what the underlying
1380 object is. Likewise for SAVE_EXPR. */
1381 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1382 || TREE_CODE (inner) == NON_LVALUE_EXPR
1383 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1384 || TREE_CODE (inner) == SAVE_EXPR)
1385 inner = TREE_OPERAND (inner, 0);
1387 if (! DECL_P (inner))
1391 if (inner == TREE_OPERAND (ref, 0))
1394 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1395 TREE_OPERAND (ref, 1), NULL_TREE);
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */
1402 mem_expr_equal_p (tree expr1, tree expr2)
1407 if (! expr1 || ! expr2)
1410 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1413 if (TREE_CODE (expr1) == COMPONENT_REF)
1415 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1416 TREE_OPERAND (expr2, 0))
1417 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1418 TREE_OPERAND (expr2, 1));
1420 if (INDIRECT_REF_P (expr1))
1421 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1422 TREE_OPERAND (expr2, 0));
1424 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1425 have been resolved here. */
1426 gcc_assert (DECL_P (expr1));
1428 /* Decls with different pointers can't be equal. */
1432 /* Given REF, a MEM, and T, either the type of X or the expression
1433 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1434 if we are making a new object of this type. BITPOS is nonzero if
1435 there is an offset outstanding on T that will be applied later. */
1438 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1439 HOST_WIDE_INT bitpos)
1441 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1442 tree expr = MEM_EXPR (ref);
1443 rtx offset = MEM_OFFSET (ref);
1444 rtx size = MEM_SIZE (ref);
1445 unsigned int align = MEM_ALIGN (ref);
1446 HOST_WIDE_INT apply_bitpos = 0;
1449 /* It can happen that type_for_mode was given a mode for which there
1450 is no language-level type. In which case it returns NULL, which
1455 type = TYPE_P (t) ? t : TREE_TYPE (t);
1456 if (type == error_mark_node)
1459 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1460 wrong answer, as it assumes that DECL_RTL already has the right alias
1461 info. Callers should not set DECL_RTL until after the call to
1462 set_mem_attributes. */
1463 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1465 /* Get the alias set from the expression or type (perhaps using a
1466 front-end routine) and use it. */
1467 alias = get_alias_set (t);
1469 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1470 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1471 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1473 /* If we are making an object of this type, or if this is a DECL, we know
1474 that it is a scalar if the type is not an aggregate. */
1475 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1476 MEM_SCALAR_P (ref) = 1;
1478 /* We can set the alignment from the type if we are making an object,
1479 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1480 if (objectp || TREE_CODE (t) == INDIRECT_REF
1481 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1482 || TYPE_ALIGN_OK (type))
1483 align = MAX (align, TYPE_ALIGN (type));
1485 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1487 if (integer_zerop (TREE_OPERAND (t, 1)))
1488 /* We don't know anything about the alignment. */
1489 align = BITS_PER_UNIT;
1491 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1494 /* If the size is known, we can set that. */
1495 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1496 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
1504 if (TREE_THIS_VOLATILE (t))
1505 MEM_VOLATILE_P (ref) = 1;
1507 /* Now remove any conversions: they don't change what the underlying
1508 object is. Likewise for SAVE_EXPR. */
1509 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1510 || TREE_CODE (t) == NON_LVALUE_EXPR
1511 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1512 || TREE_CODE (t) == SAVE_EXPR)
1513 t = TREE_OPERAND (t, 0);
1515 /* We may look through structure-like accesses for the purposes of
1516 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1518 while (TREE_CODE (base) == COMPONENT_REF
1519 || TREE_CODE (base) == REALPART_EXPR
1520 || TREE_CODE (base) == IMAGPART_EXPR
1521 || TREE_CODE (base) == BIT_FIELD_REF)
1522 base = TREE_OPERAND (base, 0);
1526 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1527 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1529 MEM_NOTRAP_P (ref) = 1;
1532 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1534 base = get_base_address (base);
1535 if (base && DECL_P (base)
1536 && TREE_READONLY (base)
1537 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1539 tree base_type = TREE_TYPE (base);
1540 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1541 || DECL_ARTIFICIAL (base));
1542 MEM_READONLY_P (ref) = 1;
  /* If this expression uses its parent's alias set, mark it such
     that we won't change it.  */
1547 if (component_uses_parent_alias_set (t))
1548 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1550 /* If this is a decl, set the attributes of the MEM from it. */
1554 offset = const0_rtx;
1555 apply_bitpos = bitpos;
1556 size = (DECL_SIZE_UNIT (t)
1557 && host_integerp (DECL_SIZE_UNIT (t), 1)
1558 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1559 align = DECL_ALIGN (t);
1562 /* If this is a constant, we know the alignment. */
1563 else if (CONSTANT_CLASS_P (t))
1565 align = TYPE_ALIGN (type);
1566 #ifdef CONSTANT_ALIGNMENT
1567 align = CONSTANT_ALIGNMENT (t, align);
1571 /* If this is a field reference and not a bit-field, record it. */
  /* ??? There is some information that can be gleaned from bit-fields,
     such as the word offset in the structure that might be modified.
     But skip it for now.  */
1575 else if (TREE_CODE (t) == COMPONENT_REF
1576 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1578 expr = component_ref_for_mem_expr (t);
1579 offset = const0_rtx;
1580 apply_bitpos = bitpos;
1581 /* ??? Any reason the field size would be different than
1582 the size we got from the type? */
1585 /* If this is an array reference, look for an outer field reference. */
1586 else if (TREE_CODE (t) == ARRAY_REF)
1588 tree off_tree = size_zero_node;
      /* We can't modify t, because we use it at the end of the
	 function.  */
1595 tree index = TREE_OPERAND (t2, 1);
1596 tree low_bound = array_ref_low_bound (t2);
1597 tree unit_size = array_ref_element_size (t2);
1599 /* We assume all arrays have sizes that are a multiple of a byte.
1600 First subtract the lower bound, if any, in the type of the
1601 index, then convert to sizetype and multiply by the size of
1602 the array element. */
1603 if (! integer_zerop (low_bound))
1604 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1607 off_tree = size_binop (PLUS_EXPR,
1608 size_binop (MULT_EXPR, convert (sizetype,
1612 t2 = TREE_OPERAND (t2, 0);
1614 while (TREE_CODE (t2) == ARRAY_REF);
1620 if (host_integerp (off_tree, 1))
1622 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1623 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1624 align = DECL_ALIGN (t2);
1625 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1627 offset = GEN_INT (ioff);
1628 apply_bitpos = bitpos;
1631 else if (TREE_CODE (t2) == COMPONENT_REF)
1633 expr = component_ref_for_mem_expr (t2);
1634 if (host_integerp (off_tree, 1))
1636 offset = GEN_INT (tree_low_cst (off_tree, 1));
1637 apply_bitpos = bitpos;
1639 /* ??? Any reason the field size would be different than
1640 the size we got from the type? */
1642 else if (flag_argument_noalias > 1
1643 && (INDIRECT_REF_P (t2))
1644 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1651 /* If this is a Fortran indirect argument reference, record the
1653 else if (flag_argument_noalias > 1
1654 && (INDIRECT_REF_P (t))
1655 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1662 /* If we modified OFFSET based on T, then subtract the outstanding
1663 bit position offset. Similarly, increase the size of the accessed
1664 object to contain the negative offset. */
1667 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1669 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1672 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
	 we're overlapping.  */
1680 /* Now set the attributes we computed above. */
1682 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1684 /* If this is already known to be a scalar or aggregate, we are done. */
1685 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1688 /* If it is a reference into an aggregate, this is part of an aggregate.
1689 Otherwise we don't know. */
1690 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1691 || TREE_CODE (t) == ARRAY_RANGE_REF
1692 || TREE_CODE (t) == BIT_FIELD_REF)
1693 MEM_IN_STRUCT_P (ref) = 1;
1697 set_mem_attributes (rtx ref, tree t, int objectp)
1699 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
/* Set the memory attributes of MEM from the register attributes of REG.  */
1705 set_mem_attrs_from_reg (rtx mem, rtx reg)
1708 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1709 GEN_INT (REG_OFFSET (reg)),
1710 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1713 /* Set the alias set of MEM to SET. */
1716 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1718 #ifdef ENABLE_CHECKING
1719 /* If the new and old alias sets don't conflict, something is wrong. */
1720 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1723 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1724 MEM_SIZE (mem), MEM_ALIGN (mem),
1728 /* Set the alignment of MEM to ALIGN bits. */
1731 set_mem_align (rtx mem, unsigned int align)
1733 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1734 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1738 /* Set the expr for MEM to EXPR. */
1741 set_mem_expr (rtx mem, tree expr)
1744 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1745 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1748 /* Set the offset of MEM to OFFSET. */
1751 set_mem_offset (rtx mem, rtx offset)
1753 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1754 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1758 /* Set the size of MEM to SIZE. */
1761 set_mem_size (rtx mem, rtx size)
1763 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1764 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1768 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1769 and its address changed to ADDR. (VOIDmode means don't change the mode.
1770 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1771 returned memory location is required to be valid. The memory
1772 attributes are not changed. */
1775 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1779 gcc_assert (MEM_P (memref));
1780 if (mode == VOIDmode)
1781 mode = GET_MODE (memref);
1783 addr = XEXP (memref, 0);
1784 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1785 && (!validate || memory_address_p (mode, addr)))
1790 if (reload_in_progress || reload_completed)
1791 gcc_assert (memory_address_p (mode, addr));
1793 addr = memory_address (mode, addr);
1796 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1799 new = gen_rtx_MEM (mode, addr);
1800 MEM_COPY_ATTRIBUTES (new, memref);
1804 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1805 way we are changing MEMREF, so we only preserve the alias set. */
1808 change_address (rtx memref, enum machine_mode mode, rtx addr)
1810 rtx new = change_address_1 (memref, mode, addr, 1), size;
1811 enum machine_mode mmode = GET_MODE (new);
1814 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1815 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1817 /* If there are no changes, just return the original memory reference. */
1820 if (MEM_ATTRS (memref) == 0
1821 || (MEM_EXPR (memref) == NULL
1822 && MEM_OFFSET (memref) == NULL
1823 && MEM_SIZE (memref) == size
1824 && MEM_ALIGN (memref) == align))
1827 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1828 MEM_COPY_ATTRIBUTES (new, memref);
1832 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1837 /* Return a memory reference like MEMREF, but with its mode changed
1838 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1839 nonzero, the memory address is forced to be valid.
1840 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1841 and caller is responsible for adjusting MEMREF base register. */
1844 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1845 int validate, int adjust)
1847 rtx addr = XEXP (memref, 0);
1849 rtx memoffset = MEM_OFFSET (memref);
1851 unsigned int memalign = MEM_ALIGN (memref);
1853 /* If there are no changes, just return the original memory reference. */
1854 if (mode == GET_MODE (memref) && !offset
1855 && (!validate || memory_address_p (mode, addr)))
1858 /* ??? Prefer to create garbage instead of creating shared rtl.
1859 This may happen even if offset is nonzero -- consider
1860 (plus (plus reg reg) const_int) -- so do this always. */
1861 addr = copy_rtx (addr);
1865 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1866 object, we can merge it into the LO_SUM. */
1867 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1869 && (unsigned HOST_WIDE_INT) offset
1870 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1871 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1872 plus_constant (XEXP (addr, 1), offset));
1874 addr = plus_constant (addr, offset);
1877 new = change_address_1 (memref, mode, addr, validate);
1879 /* Compute the new values of the memory attributes due to this adjustment.
1880 We add the offsets and update the alignment. */
1882 memoffset = GEN_INT (offset + INTVAL (memoffset));
  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is 0.  */
1890 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1892 /* We can compute the size in a number of ways. */
1893 if (GET_MODE (new) != BLKmode)
1894 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1895 else if (MEM_SIZE (memref))
1896 size = plus_constant (MEM_SIZE (memref), -offset);
1898 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1899 memoffset, size, memalign, GET_MODE (new));
1901 /* At some point, we should validate that this offset is within the object,
1902 if all the appropriate values are known. */
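/* Illustrative sketch (not part of GCC proper, MEM is a hypothetical
   wider memory reference): the usual entry points are the adjust_address
   and adjust_address_nv macros, e.g. to access the third byte:

       rtx byte = adjust_address (mem, QImode, 2);

   which changes the mode to QImode, adds 2 to the address and to
   MEM_OFFSET, and reduces MEM_ALIGN according to the lowest set bit of
   the offset.  */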
1906 /* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */
1912 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1913 HOST_WIDE_INT offset, int validate)
1915 memref = change_address_1 (memref, VOIDmode, addr, validate);
1916 return adjust_address_1 (memref, mode, offset, validate, 0);
1919 /* Return a memory reference like MEMREF, but whose address is changed by
1920 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1921 known to be in OFFSET (possibly 1). */
1924 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1926 rtx new, addr = XEXP (memref, 0);
1928 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1930 /* At this point we don't know _why_ the address is invalid. It
1931 could have secondary memory references, multiplies or anything.
1933 However, if we did go and rearrange things, we can wind up not
1934 being able to recognize the magic around pic_offset_table_rtx.
1935 This stuff is fragile, and is yet another example of why it is
1936 bad to expose PIC machinery too early. */
1937 if (! memory_address_p (GET_MODE (memref), new)
1938 && GET_CODE (addr) == PLUS
1939 && XEXP (addr, 0) == pic_offset_table_rtx)
1941 addr = force_reg (GET_MODE (addr), addr);
1942 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1945 update_temp_slot_address (XEXP (memref, 0), new);
1946 new = change_address_1 (memref, VOIDmode, new, 1);
1948 /* If there are no changes, just return the original memory reference. */
  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
1955 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1956 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1961 /* Return a memory reference like MEMREF, but with its address changed to
1962 ADDR. The caller is asserting that the actual piece of memory pointed
1963 to is the same, just the form of the address is being changed, such as
1964 by putting something into a register. */
1967 replace_equiv_address (rtx memref, rtx addr)
1969 /* change_address_1 copies the memory attribute structure without change
1970 and that's exactly what we want here. */
1971 update_temp_slot_address (XEXP (memref, 0), addr);
1972 return change_address_1 (memref, VOIDmode, addr, 1);
1975 /* Likewise, but the reference is not required to be valid. */
1978 replace_equiv_address_nv (rtx memref, rtx addr)
1980 return change_address_1 (memref, VOIDmode, addr, 0);
1983 /* Return a memory reference like MEMREF, but with its mode widened to
1984 MODE and offset by OFFSET. This would be used by targets that e.g.
1985 cannot issue QImode memory operations and have to use SImode memory
1986 operations plus masking logic. */
1989 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1991 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1992 tree expr = MEM_EXPR (new);
1993 rtx memoffset = MEM_OFFSET (new);
1994 unsigned int size = GET_MODE_SIZE (mode);
1996 /* If there are no changes, just return the original memory reference. */
2000 /* If we don't know what offset we were at within the expression, then
2001 we can't know if we've overstepped the bounds. */
2007 if (TREE_CODE (expr) == COMPONENT_REF)
2009 tree field = TREE_OPERAND (expr, 1);
2010 tree offset = component_ref_field_offset (expr);
2012 if (! DECL_SIZE_UNIT (field))
2018 /* Is the field at least as large as the access? If so, ok,
2019 otherwise strip back to the containing structure. */
2020 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2021 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2022 && INTVAL (memoffset) >= 0)
2025 if (! host_integerp (offset, 1))
2031 expr = TREE_OPERAND (expr, 0);
2033 = (GEN_INT (INTVAL (memoffset)
2034 + tree_low_cst (offset, 1)
2035 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2038 /* Similarly for the decl. */
2039 else if (DECL_P (expr)
2040 && DECL_SIZE_UNIT (expr)
2041 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2042 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2043 && (! memoffset || INTVAL (memoffset) >= 0))
2047 /* The widened memory access overflows the expression, which means
2048 that it could alias another expression. Zap it. */
2055 memoffset = NULL_RTX;
2057 /* The widened memory may alias other stuff, so zap the alias set. */
2058 /* ??? Maybe use get_alias_set on any remaining expression. */
2060 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2061 MEM_ALIGN (new), mode);
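/* Illustrative sketch, not part of the original file: a target without
   QImode loads might widen a byte reference to SImode and leave the byte
   extraction (shift and mask) to the caller.  MEM is hypothetical.  */
#if 0
static rtx
example_widen_byte_load (rtx mem)
{
  rtx wide = widen_memory_access (mem, SImode, 0);
  return force_reg (SImode, wide);
}
#endif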
2066 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2069 gen_label_rtx (void)
2071 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2072 NULL, label_num++, NULL);
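/* Illustrative sketch, not part of the original file: a CODE_LABEL made by
   gen_label_rtx only enters the insn stream when emit_label is called on
   it; branches to it may be emitted before or after that point.  */
#if 0
static void
example_place_label (void)
{
  rtx label = gen_label_rtx ();
  /* ... emit a conditional branch to LABEL here ...  */
  emit_label (label);
}
#endif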
2075 /* For procedure integration. */
2077 /* Install new pointers to the first and last insns in the chain.
2078 Also, set cur_insn_uid to one higher than the last in use.
2079 Used for an inline-procedure after copying the insn chain. */
2082 set_new_first_and_last_insn (rtx first, rtx last)
2090 for (insn = first; insn; insn = NEXT_INSN (insn))
2091 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2096 /* Go through all the RTL insn bodies and copy any invalid shared
2097 structure. This routine should only be called once. */
2100 unshare_all_rtl_1 (tree fndecl, rtx insn)
2104 /* Make sure that virtual parameters are not shared. */
2105 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2106 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2108 /* Make sure that virtual stack slots are not shared. */
2109 unshare_all_decls (DECL_INITIAL (fndecl));
2111 /* Unshare just about everything else. */
2112 unshare_all_rtl_in_chain (insn);
2114 /* Make sure the addresses of stack slots found outside the insn chain
2115 (such as, in DECL_RTL of a variable) are not shared
2116 with the insn chain.
2118 This special care is necessary when the stack slot MEM does not
2119 actually appear in the insn chain. If it does appear, its address
2120 is unshared from all else at that point. */
2121 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2124 /* Go through all the RTL insn bodies and copy any invalid shared
2125 structure, again. This is a fairly expensive thing to do so it
2126 should be done sparingly. */
2129 unshare_all_rtl_again (rtx insn)
2134 for (p = insn; p; p = NEXT_INSN (p))
2137 reset_used_flags (PATTERN (p));
2138 reset_used_flags (REG_NOTES (p));
2139 reset_used_flags (LOG_LINKS (p));
2142 /* Make sure that virtual stack slots are not shared. */
2143 reset_used_decls (DECL_INITIAL (cfun->decl));
2145 /* Make sure that virtual parameters are not shared. */
2146 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2147 reset_used_flags (DECL_RTL (decl));
2149 reset_used_flags (stack_slot_list);
2151 unshare_all_rtl_1 (cfun->decl, insn);
2155 unshare_all_rtl (void)
2157 unshare_all_rtl_1 (current_function_decl, get_insns ());
2160 struct tree_opt_pass pass_unshare_all_rtl =
2162 "unshare", /* name */
2164 unshare_all_rtl, /* execute */
2167 0, /* static_pass_number */
2169 0, /* properties_required */
2170 0, /* properties_provided */
2171 0, /* properties_destroyed */
2172 0, /* todo_flags_start */
2173 TODO_dump_func, /* todo_flags_finish */
2178 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2179 Recursively does the same for subexpressions. */
2182 verify_rtx_sharing (rtx orig, rtx insn)
2187 const char *format_ptr;
2192 code = GET_CODE (x);
2194 /* These types may be freely shared. */
2209 /* SCRATCH must be shared because each one represents a distinct value. */
2211 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2216 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2217 a LABEL_REF, it isn't sharable. */
2218 if (GET_CODE (XEXP (x, 0)) == PLUS
2219 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2220 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2225 /* A MEM is allowed to be shared if its address is constant. */
2226 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2227 || reload_completed || reload_in_progress)
2236 /* This rtx may not be shared. If it has already been seen,
2237 replace it with a copy of itself. */
2238 #ifdef ENABLE_CHECKING
2239 if (RTX_FLAG (x, used))
2241 error ("invalid rtl sharing found in the insn");
2243 error ("shared rtx");
2245 internal_error ("internal consistency failure");
2248 gcc_assert (!RTX_FLAG (x, used));
2250 RTX_FLAG (x, used) = 1;
2252 /* Now scan the subexpressions recursively. */
2254 format_ptr = GET_RTX_FORMAT (code);
2256 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2258 switch (*format_ptr++)
2261 verify_rtx_sharing (XEXP (x, i), insn);
2265 if (XVEC (x, i) != NULL)
2268 int len = XVECLEN (x, i);
2270 for (j = 0; j < len; j++)
2272 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2274 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2275 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2277 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2279 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2288 /* Go through all the RTL insn bodies and check that there is no unexpected
2289 sharing in between the subexpressions. */
2292 verify_rtl_sharing (void)
2296 for (p = get_insns (); p; p = NEXT_INSN (p))
2299 reset_used_flags (PATTERN (p));
2300 reset_used_flags (REG_NOTES (p));
2301 reset_used_flags (LOG_LINKS (p));
2304 for (p = get_insns (); p; p = NEXT_INSN (p))
2307 verify_rtx_sharing (PATTERN (p), p);
2308 verify_rtx_sharing (REG_NOTES (p), p);
2309 verify_rtx_sharing (LOG_LINKS (p), p);
2313 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2314 Assumes the mark bits are cleared at entry. */
2317 unshare_all_rtl_in_chain (rtx insn)
2319 for (; insn; insn = NEXT_INSN (insn))
2322 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2323 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2324 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2328 /* Go through all virtual stack slots of a function and copy any
2329 shared structure. */
2331 unshare_all_decls (tree blk)
2335 /* Copy shared decls. */
2336 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2337 if (DECL_RTL_SET_P (t))
2338 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2340 /* Now process sub-blocks. */
2341 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2342 unshare_all_decls (t);
2345 /* Go through all virtual stack slots of a function and mark them as not shared. */
2348 reset_used_decls (tree blk)
2353 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2354 if (DECL_RTL_SET_P (t))
2355 reset_used_flags (DECL_RTL (t));
2357 /* Now process sub-blocks. */
2358 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2359 reset_used_decls (t);
2362 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2363 Recursively does the same for subexpressions. Uses
2364 copy_rtx_if_shared_1 to reduce stack space. */
2367 copy_rtx_if_shared (rtx orig)
2369 copy_rtx_if_shared_1 (&orig);
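/* Illustrative sketch, not part of the original file: unsharing is a
   two-phase protocol, as unshare_all_rtl_again shows above -- first clear
   the used bits over everything reachable, then walk again and copy
   whatever is reached a second time.  Applied to a single pattern in
   isolation it looks like this (INSN is hypothetical).  */
#if 0
static void
example_unshare_pattern (rtx insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
#endif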
2373 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2374 use. Recursively does the same for subexpressions. */
2377 copy_rtx_if_shared_1 (rtx *orig1)
2383 const char *format_ptr;
2387 /* Repeat is used to turn tail-recursion into iteration. */
2394 code = GET_CODE (x);
2396 /* These types may be freely shared. */
2410 /* SCRATCH must be shared because each one represents a distinct value. */
2413 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2418 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2419 a LABEL_REF, it isn't sharable. */
2420 if (GET_CODE (XEXP (x, 0)) == PLUS
2421 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2422 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2431 /* The chain of insns is not being copied. */
2438 /* This rtx may not be shared. If it has already been seen,
2439 replace it with a copy of itself. */
2441 if (RTX_FLAG (x, used))
2445 copy = rtx_alloc (code);
2446 memcpy (copy, x, RTX_SIZE (code));
2450 RTX_FLAG (x, used) = 1;
2452 /* Now scan the subexpressions recursively.
2453 We can store any replaced subexpressions directly into X
2454 since we know X is not shared! Any vectors in X
2455 must be copied if X was copied. */
2457 format_ptr = GET_RTX_FORMAT (code);
2458 length = GET_RTX_LENGTH (code);
2461 for (i = 0; i < length; i++)
2463 switch (*format_ptr++)
2467 copy_rtx_if_shared_1 (last_ptr);
2468 last_ptr = &XEXP (x, i);
2472 if (XVEC (x, i) != NULL)
2475 int len = XVECLEN (x, i);
2477 /* Copy the vector iff I copied the rtx and the length is nonzero. */
2479 if (copied && len > 0)
2480 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2482 /* Call recursively on all inside the vector. */
2483 for (j = 0; j < len; j++)
2486 copy_rtx_if_shared_1 (last_ptr);
2487 last_ptr = &XVECEXP (x, i, j);
2502 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2503 to look for shared sub-parts. */
2506 reset_used_flags (rtx x)
2510 const char *format_ptr;
2513 /* Repeat is used to turn tail-recursion into iteration. */
2518 code = GET_CODE (x);
2520 /* These types may be freely shared so we needn't do any resetting for them.
2541 /* The chain of insns is not being copied. */
2548 RTX_FLAG (x, used) = 0;
2550 format_ptr = GET_RTX_FORMAT (code);
2551 length = GET_RTX_LENGTH (code);
2553 for (i = 0; i < length; i++)
2555 switch (*format_ptr++)
2563 reset_used_flags (XEXP (x, i));
2567 for (j = 0; j < XVECLEN (x, i); j++)
2568 reset_used_flags (XVECEXP (x, i, j));
2574 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2575 to look for shared sub-parts. */
2578 set_used_flags (rtx x)
2582 const char *format_ptr;
2587 code = GET_CODE (x);
2589 /* These types may be freely shared so we needn't do any resetting for them.
2610 /* The chain of insns is not being copied. */
2617 RTX_FLAG (x, used) = 1;
2619 format_ptr = GET_RTX_FORMAT (code);
2620 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2622 switch (*format_ptr++)
2625 set_used_flags (XEXP (x, i));
2629 for (j = 0; j < XVECLEN (x, i); j++)
2630 set_used_flags (XVECEXP (x, i, j));
2636 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2637 Return X or the rtx for the pseudo reg the value of X was copied into.
2638 OTHER must be valid as a SET_DEST. */
2641 make_safe_from (rtx x, rtx other)
2644 switch (GET_CODE (other))
2647 other = SUBREG_REG (other);
2649 case STRICT_LOW_PART:
2652 other = XEXP (other, 0);
2661 && GET_CODE (x) != SUBREG)
2663 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2664 || reg_mentioned_p (other, x))))
2666 rtx temp = gen_reg_rtx (GET_MODE (x));
2667 emit_move_insn (temp, x);
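/* Illustrative sketch, not part of the original file: an expander that is
   about to store into TARGET can first protect a value it still needs from
   being clobbered by that store.  VALUE and TARGET are hypothetical.  */
#if 0
static rtx
example_protect_value (rtx value, rtx target)
{
  value = make_safe_from (value, target);
  /* ... emit code that writes TARGET; VALUE remains usable ...  */
  return value;
}
#endif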
2673 /* Emission of insns (adding them to the doubly-linked list). */
2675 /* Return the first insn of the current sequence or current function. */
2683 /* Specify a new insn as the first in the chain. */
2686 set_first_insn (rtx insn)
2688 gcc_assert (!PREV_INSN (insn));
2692 /* Return the last insn emitted in current sequence or current function. */
2695 get_last_insn (void)
2700 /* Specify a new insn as the last in the chain. */
2703 set_last_insn (rtx insn)
2705 gcc_assert (!NEXT_INSN (insn));
2709 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2712 get_last_insn_anywhere (void)
2714 struct sequence_stack *stack;
2717 for (stack = seq_stack; stack; stack = stack->next)
2718 if (stack->last != 0)
2723 /* Return the first nonnote insn emitted in current sequence or current
2724 function. This routine looks inside SEQUENCEs. */
2727 get_first_nonnote_insn (void)
2729 rtx insn = first_insn;
2734 for (insn = next_insn (insn);
2735 insn && NOTE_P (insn);
2736 insn = next_insn (insn))
2740 if (NONJUMP_INSN_P (insn)
2741 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2742 insn = XVECEXP (PATTERN (insn), 0, 0);
2749 /* Return the last nonnote insn emitted in current sequence or current
2750 function. This routine looks inside SEQUENCEs. */
2753 get_last_nonnote_insn (void)
2755 rtx insn = last_insn;
2760 for (insn = previous_insn (insn);
2761 insn && NOTE_P (insn);
2762 insn = previous_insn (insn))
2766 if (NONJUMP_INSN_P (insn)
2767 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2768 insn = XVECEXP (PATTERN (insn), 0,
2769 XVECLEN (PATTERN (insn), 0) - 1);
2776 /* Return a number larger than any instruction's uid in this function. */
2781 return cur_insn_uid;
2784 /* Renumber instructions so that no instruction UIDs are wasted. */
2787 renumber_insns (FILE *stream)
2791 /* If we're not supposed to renumber instructions, don't. */
2792 if (!flag_renumber_insns)
2795 /* If there aren't that many instructions, then it's not really
2796 worth renumbering them. */
2797 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2802 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2805 fprintf (stream, "Renumbering insn %d to %d\n",
2806 INSN_UID (insn), cur_insn_uid);
2807 INSN_UID (insn) = cur_insn_uid++;
2811 /* Return the next insn. If it is a SEQUENCE, return the first insn of the sequence.
2815 next_insn (rtx insn)
2819 insn = NEXT_INSN (insn);
2820 if (insn && NONJUMP_INSN_P (insn)
2821 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2822 insn = XVECEXP (PATTERN (insn), 0, 0);
2828 /* Return the previous insn. If it is a SEQUENCE, return the last insn of the sequence.
2832 previous_insn (rtx insn)
2836 insn = PREV_INSN (insn);
2837 if (insn && NONJUMP_INSN_P (insn)
2838 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2839 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2845 /* Return the next insn after INSN that is not a NOTE. This routine does not
2846 look inside SEQUENCEs. */
2849 next_nonnote_insn (rtx insn)
2853 insn = NEXT_INSN (insn);
2854 if (insn == 0 || !NOTE_P (insn))
2861 /* Return the previous insn before INSN that is not a NOTE. This routine does
2862 not look inside SEQUENCEs. */
2865 prev_nonnote_insn (rtx insn)
2869 insn = PREV_INSN (insn);
2870 if (insn == 0 || !NOTE_P (insn))
2877 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2878 or 0, if there is none. This routine does not look inside SEQUENCEs. */
2882 next_real_insn (rtx insn)
2886 insn = NEXT_INSN (insn);
2887 if (insn == 0 || INSN_P (insn))
2894 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2895 or 0, if there is none. This routine does not look inside SEQUENCEs. */
2899 prev_real_insn (rtx insn)
2903 insn = PREV_INSN (insn);
2904 if (insn == 0 || INSN_P (insn))
2911 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2912 This routine does not look inside SEQUENCEs. */
2915 last_call_insn (void)
2919 for (insn = get_last_insn ();
2920 insn && !CALL_P (insn);
2921 insn = PREV_INSN (insn))
2927 /* Find the next insn after INSN that really does something. This routine
2928 does not look inside SEQUENCEs. Until reload has completed, this is the
2929 same as next_real_insn. */
2932 active_insn_p (rtx insn)
2934 return (CALL_P (insn) || JUMP_P (insn)
2935 || (NONJUMP_INSN_P (insn)
2936 && (! reload_completed
2937 || (GET_CODE (PATTERN (insn)) != USE
2938 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2942 next_active_insn (rtx insn)
2946 insn = NEXT_INSN (insn);
2947 if (insn == 0 || active_insn_p (insn))
2954 /* Find the last insn before INSN that really does something. This routine
2955 does not look inside SEQUENCEs. Until reload has completed, this is the
2956 same as prev_real_insn. */
2959 prev_active_insn (rtx insn)
2963 insn = PREV_INSN (insn);
2964 if (insn == 0 || active_insn_p (insn))
2971 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2974 next_label (rtx insn)
2978 insn = NEXT_INSN (insn);
2979 if (insn == 0 || LABEL_P (insn))
2986 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2989 prev_label (rtx insn)
2993 insn = PREV_INSN (insn);
2994 if (insn == 0 || LABEL_P (insn))
3001 /* Return the last label to mark the same position as LABEL. Return null
3002 if LABEL itself is null. */
3005 skip_consecutive_labels (rtx label)
3009 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3017 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3018 and REG_CC_USER notes so we can find it. */
3021 link_cc0_insns (rtx insn)
3023 rtx user = next_nonnote_insn (insn);
3025 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3026 user = XVECEXP (PATTERN (user), 0, 0);
3028 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3030 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3033 /* Return the next insn that uses CC0 after INSN, which is assumed to
3034 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3035 applied to the result of this function should yield INSN).
3037 Normally, this is simply the next insn. However, if a REG_CC_USER note
3038 is present, it contains the insn that uses CC0.
3040 Return 0 if we can't find the insn. */
3043 next_cc0_user (rtx insn)
3045 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3048 return XEXP (note, 0);
3050 insn = next_nonnote_insn (insn);
3051 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3052 insn = XVECEXP (PATTERN (insn), 0, 0);
3054 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3060 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3061 note, it is the previous insn. */
3064 prev_cc0_setter (rtx insn)
3066 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3069 return XEXP (note, 0);
3071 insn = prev_nonnote_insn (insn);
3072 gcc_assert (sets_cc0_p (PATTERN (insn)));
3078 /* Increment the label uses for all labels present in rtx. */
3081 mark_label_nuses (rtx x)
3087 code = GET_CODE (x);
3088 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3089 LABEL_NUSES (XEXP (x, 0))++;
3091 fmt = GET_RTX_FORMAT (code);
3092 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3095 mark_label_nuses (XEXP (x, i));
3096 else if (fmt[i] == 'E')
3097 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3098 mark_label_nuses (XVECEXP (x, i, j));
3103 /* Try splitting insns that can be split for better scheduling.
3104 PAT is the pattern which might split.
3105 TRIAL is the insn providing PAT.
3106 LAST is nonzero if we should return the last insn of the sequence produced.
3108 If this routine succeeds in splitting, it returns the first or last
3109 replacement insn depending on the value of LAST. Otherwise, it
3110 returns TRIAL. If the insn to be returned can be split, it will be. */
3113 try_split (rtx pat, rtx trial, int last)
3115 rtx before = PREV_INSN (trial);
3116 rtx after = NEXT_INSN (trial);
3117 int has_barrier = 0;
3121 rtx insn_last, insn;
3124 if (any_condjump_p (trial)
3125 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3126 split_branch_probability = INTVAL (XEXP (note, 0));
3127 probability = split_branch_probability;
3129 seq = split_insns (pat, trial);
3131 split_branch_probability = -1;
3133 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3134 We may need to handle this specially. */
3135 if (after && BARRIER_P (after))
3138 after = NEXT_INSN (after);
3144 /* Avoid infinite loop if any insn of the result matches
3145 the original pattern. */
3149 if (INSN_P (insn_last)
3150 && rtx_equal_p (PATTERN (insn_last), pat))
3152 if (!NEXT_INSN (insn_last))
3154 insn_last = NEXT_INSN (insn_last);
3158 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3162 mark_jump_label (PATTERN (insn), insn, 0);
3164 if (probability != -1
3165 && any_condjump_p (insn)
3166 && !find_reg_note (insn, REG_BR_PROB, 0))
3168 /* We can preserve the REG_BR_PROB notes only if exactly
3169 one jump is created, otherwise the machine description
3170 is responsible for this step using
3171 split_branch_probability variable. */
3172 gcc_assert (njumps == 1);
3174 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3175 GEN_INT (probability),
3181 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3182 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3185 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3188 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3191 *p = CALL_INSN_FUNCTION_USAGE (trial);
3192 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3196 /* Copy notes, particularly those related to the CFG. */
3197 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3199 switch (REG_NOTE_KIND (note))
3203 while (insn != NULL_RTX)
3206 || (flag_non_call_exceptions && INSN_P (insn)
3207 && may_trap_p (PATTERN (insn))))
3209 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3212 insn = PREV_INSN (insn);
3219 while (insn != NULL_RTX)
3223 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3226 insn = PREV_INSN (insn);
3230 case REG_NON_LOCAL_GOTO:
3232 while (insn != NULL_RTX)
3236 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3239 insn = PREV_INSN (insn);
3248 /* If there are LABELS inside the split insns increment the
3249 usage count so we don't delete the label. */
3250 if (NONJUMP_INSN_P (trial))
3253 while (insn != NULL_RTX)
3255 if (NONJUMP_INSN_P (insn))
3256 mark_label_nuses (PATTERN (insn));
3258 insn = PREV_INSN (insn);
3262 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3264 delete_insn (trial);
3266 emit_barrier_after (tem);
3268 /* Recursively call try_split for each new insn created; by the
3269 time control returns here that insn will be fully split, so
3270 set LAST and continue from the insn after the one returned.
3271 We can't use next_active_insn here since AFTER may be a note.
3272 Ignore deleted insns, which can occur if not optimizing. */
3273 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3274 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3275 tem = try_split (PATTERN (tem), tem, 1);
3277 /* Return either the first or the last insn, depending on which was requested. */
3280 ? (after ? PREV_INSN (after) : last_insn)
3281 : NEXT_INSN (before);
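/* Illustrative sketch, not part of the original file: a caller that wants
   an insn split as finely as possible just hands the insn's own pattern
   back to try_split, mirroring the recursive call above.  */
#if 0
static rtx
example_split_insn (rtx insn)
{
  /* Returns the last replacement insn, or INSN itself if nothing split.  */
  return try_split (PATTERN (insn), insn, 1);
}
#endif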
3284 /* Make and return an INSN rtx, initializing all its slots.
3285 Store PATTERN in the pattern slots. */
3288 make_insn_raw (rtx pattern)
3292 insn = rtx_alloc (INSN);
3294 INSN_UID (insn) = cur_insn_uid++;
3295 PATTERN (insn) = pattern;
3296 INSN_CODE (insn) = -1;
3297 LOG_LINKS (insn) = NULL;
3298 REG_NOTES (insn) = NULL;
3299 INSN_LOCATOR (insn) = 0;
3300 BLOCK_FOR_INSN (insn) = NULL;
3302 #ifdef ENABLE_RTL_CHECKING
3305 && (returnjump_p (insn)
3306 || (GET_CODE (insn) == SET
3307 && SET_DEST (insn) == pc_rtx)))
3309 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3317 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3320 make_jump_insn_raw (rtx pattern)
3324 insn = rtx_alloc (JUMP_INSN);
3325 INSN_UID (insn) = cur_insn_uid++;
3327 PATTERN (insn) = pattern;
3328 INSN_CODE (insn) = -1;
3329 LOG_LINKS (insn) = NULL;
3330 REG_NOTES (insn) = NULL;
3331 JUMP_LABEL (insn) = NULL;
3332 INSN_LOCATOR (insn) = 0;
3333 BLOCK_FOR_INSN (insn) = NULL;
3338 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3341 make_call_insn_raw (rtx pattern)
3345 insn = rtx_alloc (CALL_INSN);
3346 INSN_UID (insn) = cur_insn_uid++;
3348 PATTERN (insn) = pattern;
3349 INSN_CODE (insn) = -1;
3350 LOG_LINKS (insn) = NULL;
3351 REG_NOTES (insn) = NULL;
3352 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3353 INSN_LOCATOR (insn) = 0;
3354 BLOCK_FOR_INSN (insn) = NULL;
3359 /* Add INSN to the end of the doubly-linked list.
3360 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3365 PREV_INSN (insn) = last_insn;
3366 NEXT_INSN (insn) = 0;
3368 if (NULL != last_insn)
3369 NEXT_INSN (last_insn) = insn;
3371 if (NULL == first_insn)
3377 /* Add INSN into the doubly-linked list after insn AFTER. This and
3378 the next should be the only functions called to insert an insn once
3379 delay slots have been filled since only they know how to update a SEQUENCE. */
3383 add_insn_after (rtx insn, rtx after)
3385 rtx next = NEXT_INSN (after);
3388 gcc_assert (!optimize || !INSN_DELETED_P (after));
3390 NEXT_INSN (insn) = next;
3391 PREV_INSN (insn) = after;
3395 PREV_INSN (next) = insn;
3396 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3397 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3399 else if (last_insn == after)
3403 struct sequence_stack *stack = seq_stack;
3404 /* Scan all pending sequences too. */
3405 for (; stack; stack = stack->next)
3406 if (after == stack->last)
3415 if (!BARRIER_P (after)
3416 && !BARRIER_P (insn)
3417 && (bb = BLOCK_FOR_INSN (after)))
3419 set_block_for_insn (insn, bb);
3421 bb->flags |= BB_DIRTY;
3422 /* Should not happen as first in the BB is always
3423 either NOTE or LABEL. */
3424 if (BB_END (bb) == after
3425 /* Avoid clobbering of structure when creating new BB. */
3426 && !BARRIER_P (insn)
3428 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3432 NEXT_INSN (after) = insn;
3433 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3435 rtx sequence = PATTERN (after);
3436 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3440 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3441 the previous should be the only functions called to insert an insn once
3442 delay slots have been filled since only they know how to update a SEQUENCE. */
3446 add_insn_before (rtx insn, rtx before)
3448 rtx prev = PREV_INSN (before);
3451 gcc_assert (!optimize || !INSN_DELETED_P (before));
3453 PREV_INSN (insn) = prev;
3454 NEXT_INSN (insn) = before;
3458 NEXT_INSN (prev) = insn;
3459 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3461 rtx sequence = PATTERN (prev);
3462 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3465 else if (first_insn == before)
3469 struct sequence_stack *stack = seq_stack;
3470 /* Scan all pending sequences too. */
3471 for (; stack; stack = stack->next)
3472 if (before == stack->first)
3474 stack->first = insn;
3481 if (!BARRIER_P (before)
3482 && !BARRIER_P (insn)
3483 && (bb = BLOCK_FOR_INSN (before)))
3485 set_block_for_insn (insn, bb);
3487 bb->flags |= BB_DIRTY;
3488 /* Should not happen as first in the BB is always either NOTE or LABEL. */
3490 gcc_assert (BB_HEAD (bb) != insn
3491 /* Avoid clobbering of structure when creating new BB. */
3494 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3497 PREV_INSN (before) = insn;
3498 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3499 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3502 /* Remove an insn from its doubly-linked list. This function knows how
3503 to handle sequences. */
3505 remove_insn (rtx insn)
3507 rtx next = NEXT_INSN (insn);
3508 rtx prev = PREV_INSN (insn);
3513 NEXT_INSN (prev) = next;
3514 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3516 rtx sequence = PATTERN (prev);
3517 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3520 else if (first_insn == insn)
3524 struct sequence_stack *stack = seq_stack;
3525 /* Scan all pending sequences too. */
3526 for (; stack; stack = stack->next)
3527 if (insn == stack->first)
3529 stack->first = next;
3538 PREV_INSN (next) = prev;
3539 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3540 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3542 else if (last_insn == insn)
3546 struct sequence_stack *stack = seq_stack;
3547 /* Scan all pending sequences too. */
3548 for (; stack; stack = stack->next)
3549 if (insn == stack->last)
3557 if (!BARRIER_P (insn)
3558 && (bb = BLOCK_FOR_INSN (insn)))
3561 bb->flags |= BB_DIRTY;
3562 if (BB_HEAD (bb) == insn)
3564 /* Never ever delete the basic block note without deleting the whole basic block. */
3566 gcc_assert (!NOTE_P (insn));
3567 BB_HEAD (bb) = next;
3569 if (BB_END (bb) == insn)
3574 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3577 add_function_usage_to (rtx call_insn, rtx call_fusage)
3579 gcc_assert (call_insn && CALL_P (call_insn));
3581 /* Put the register usage information on the CALL. If there is already
3582 some usage information, put ours at the end. */
3583 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3587 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3588 link = XEXP (link, 1))
3591 XEXP (link, 1) = call_fusage;
3594 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3597 /* Delete all insns made since FROM.
3598 FROM becomes the new last instruction. */
3601 delete_insns_since (rtx from)
3606 NEXT_INSN (from) = 0;
3610 /* This function is deprecated, please use sequences instead.
3612 Move a consecutive bunch of insns to a different place in the chain.
3613 The insns to be moved are those between FROM and TO.
3614 They are moved to a new position after the insn AFTER.
3615 AFTER must not be FROM or TO or any insn in between.
3617 This function does not know about SEQUENCEs and hence should not be
3618 called after delay-slot filling has been done. */
3621 reorder_insns_nobb (rtx from, rtx to, rtx after)
3623 /* Splice this bunch out of where it is now. */
3624 if (PREV_INSN (from))
3625 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3627 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3628 if (last_insn == to)
3629 last_insn = PREV_INSN (from);
3630 if (first_insn == from)
3631 first_insn = NEXT_INSN (to);
3633 /* Make the new neighbors point to it and it to them. */
3634 if (NEXT_INSN (after))
3635 PREV_INSN (NEXT_INSN (after)) = to;
3637 NEXT_INSN (to) = NEXT_INSN (after);
3638 PREV_INSN (from) = after;
3639 NEXT_INSN (after) = from;
3640 if (after == last_insn)
3644 /* Same as function above, but take care to update BB boundaries. */
3646 reorder_insns (rtx from, rtx to, rtx after)
3648 rtx prev = PREV_INSN (from);
3649 basic_block bb, bb2;
3651 reorder_insns_nobb (from, to, after);
3653 if (!BARRIER_P (after)
3654 && (bb = BLOCK_FOR_INSN (after)))
3657 bb->flags |= BB_DIRTY;
3659 if (!BARRIER_P (from)
3660 && (bb2 = BLOCK_FOR_INSN (from)))
3662 if (BB_END (bb2) == to)
3663 BB_END (bb2) = prev;
3664 bb2->flags |= BB_DIRTY;
3667 if (BB_END (bb) == after)
3670 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3672 set_block_for_insn (x, bb);
3676 /* Return the line note insn preceding INSN. */
3679 find_line_note (rtx insn)
3681 if (no_line_numbers)
3684 for (; insn; insn = PREV_INSN (insn))
3686 && NOTE_LINE_NUMBER (insn) >= 0)
3692 /* Remove unnecessary notes from the instruction stream. */
3695 remove_unnecessary_notes (void)
3697 rtx eh_stack = NULL_RTX;
3702 /* We must not remove the first instruction in the function because
3703 the compiler depends on the first instruction being a note. */
3704 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3706 /* Remember what's next. */
3707 next = NEXT_INSN (insn);
3709 /* We're only interested in notes. */
3713 switch (NOTE_LINE_NUMBER (insn))
3715 case NOTE_INSN_DELETED:
3719 case NOTE_INSN_EH_REGION_BEG:
3720 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3723 case NOTE_INSN_EH_REGION_END:
3724 /* Too many end notes. */
3725 gcc_assert (eh_stack);
3726 /* Mismatched nesting. */
3727 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3728 == NOTE_EH_HANDLER (insn));
3730 eh_stack = XEXP (eh_stack, 1);
3731 free_INSN_LIST_node (tmp);
3734 case NOTE_INSN_BLOCK_BEG:
3735 case NOTE_INSN_BLOCK_END:
3736 /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass. */
3744 /* Too many EH_REGION_BEG notes. */
3745 gcc_assert (!eh_stack);
3748 struct tree_opt_pass pass_remove_unnecessary_notes =
3750 "eunotes", /* name */
3752 remove_unnecessary_notes, /* execute */
3755 0, /* static_pass_number */
3757 0, /* properties_required */
3758 0, /* properties_provided */
3759 0, /* properties_destroyed */
3760 0, /* todo_flags_start */
3761 TODO_dump_func, /* todo_flags_finish */
3766 /* Emit insn(s) of given code and pattern
3767 at a specified place within the doubly-linked list.
3769 All of the emit_foo global entry points accept an object
3770 X which is either an insn list or a PATTERN of a single instruction.
3773 There are thus a few canonical ways to generate code and
3774 emit it at a specific place in the instruction stream. For
3775 example, consider the instruction named SPOT and the fact that
3776 we would like to emit some instructions before SPOT. We might
3777 do it like this:
3779 start_sequence ();
3780 ... emit the new instructions ...
3781 insns_head = get_insns ();
3782 end_sequence ();
3784 emit_insn_before (insns_head, SPOT);
3786 It used to be common to generate SEQUENCE rtl instead, but that
3787 is a relic of the past which no longer occurs. The reason is that
3788 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3789 generated would almost certainly die right after it was created. */
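/* Illustrative sketch, not part of the original file: the emit_* entry
   points accept either a bare pattern or a pre-built insn list, so both
   forms below add a register clear before SPOT.  REG and SPOT are
   hypothetical.  */
#if 0
static void
example_emit_before_forms (rtx reg, rtx spot)
{
  rtx seq;

  /* Form 1: hand a single PATTERN straight to the emit function.  */
  emit_insn_before (gen_rtx_SET (VOIDmode, reg, const0_rtx), spot);

  /* Form 2: build an insn list inside a sequence, then emit the list.  */
  start_sequence ();
  emit_move_insn (reg, const0_rtx);
  seq = get_insns ();
  end_sequence ();
  emit_insn_before (seq, spot);
}
#endif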
3791 /* Make X be output before the instruction BEFORE. */
3794 emit_insn_before_noloc (rtx x, rtx before)
3799 gcc_assert (before);
3804 switch (GET_CODE (x))
3815 rtx next = NEXT_INSN (insn);
3816 add_insn_before (insn, before);
3822 #ifdef ENABLE_RTL_CHECKING
3829 last = make_insn_raw (x);
3830 add_insn_before (last, before);
3837 /* Make an instruction with body X and code JUMP_INSN
3838 and output it before the instruction BEFORE. */
3841 emit_jump_insn_before_noloc (rtx x, rtx before)
3843 rtx insn, last = NULL_RTX;
3845 gcc_assert (before);
3847 switch (GET_CODE (x))
3858 rtx next = NEXT_INSN (insn);
3859 add_insn_before (insn, before);
3865 #ifdef ENABLE_RTL_CHECKING
3872 last = make_jump_insn_raw (x);
3873 add_insn_before (last, before);
3880 /* Make an instruction with body X and code CALL_INSN
3881 and output it before the instruction BEFORE. */
3884 emit_call_insn_before_noloc (rtx x, rtx before)
3886 rtx last = NULL_RTX, insn;
3888 gcc_assert (before);
3890 switch (GET_CODE (x))
3901 rtx next = NEXT_INSN (insn);
3902 add_insn_before (insn, before);
3908 #ifdef ENABLE_RTL_CHECKING
3915 last = make_call_insn_raw (x);
3916 add_insn_before (last, before);
3923 /* Make an insn of code BARRIER
3924 and output it before the insn BEFORE. */
3927 emit_barrier_before (rtx before)
3929 rtx insn = rtx_alloc (BARRIER);
3931 INSN_UID (insn) = cur_insn_uid++;
3933 add_insn_before (insn, before);
3937 /* Emit the label LABEL before the insn BEFORE. */
3940 emit_label_before (rtx label, rtx before)
3942 /* This can be called twice for the same label as a result of the
3943 confusion that follows a syntax error! So make it harmless. */
3944 if (INSN_UID (label) == 0)
3946 INSN_UID (label) = cur_insn_uid++;
3947 add_insn_before (label, before);
3953 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3956 emit_note_before (int subtype, rtx before)
3958 rtx note = rtx_alloc (NOTE);
3959 INSN_UID (note) = cur_insn_uid++;
3960 #ifndef USE_MAPPED_LOCATION
3961 NOTE_SOURCE_FILE (note) = 0;
3963 NOTE_LINE_NUMBER (note) = subtype;
3964 BLOCK_FOR_INSN (note) = NULL;
3966 add_insn_before (note, before);
3970 /* Helper for emit_insn_after, handles lists of instructions efficiently. */
3973 static rtx emit_insn_after_1 (rtx, rtx);
3976 emit_insn_after_1 (rtx first, rtx after)
3982 if (!BARRIER_P (after)
3983 && (bb = BLOCK_FOR_INSN (after)))
3985 bb->flags |= BB_DIRTY;
3986 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3987 if (!BARRIER_P (last))
3988 set_block_for_insn (last, bb);
3989 if (!BARRIER_P (last))
3990 set_block_for_insn (last, bb);
3991 if (BB_END (bb) == after)
3995 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3998 after_after = NEXT_INSN (after);
4000 NEXT_INSN (after) = first;
4001 PREV_INSN (first) = after;
4002 NEXT_INSN (last) = after_after;
4004 PREV_INSN (after_after) = last;
4006 if (after == last_insn)
4011 /* Make X be output after the insn AFTER. */
4014 emit_insn_after_noloc (rtx x, rtx after)
4023 switch (GET_CODE (x))
4031 last = emit_insn_after_1 (x, after);
4034 #ifdef ENABLE_RTL_CHECKING
4041 last = make_insn_raw (x);
4042 add_insn_after (last, after);
4049 /* Similar to emit_insn_after, except that line notes are to be inserted so
4050 as to act as if this insn were at FROM. */
4053 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4055 rtx from_line = find_line_note (from);
4056 rtx after_line = find_line_note (after);
4057 rtx insn = emit_insn_after (x, after);
4060 emit_note_copy_after (from_line, after);
4063 emit_note_copy_after (after_line, insn);
4066 /* Make an insn of code JUMP_INSN with body X
4067 and output it after the insn AFTER. */
4070 emit_jump_insn_after_noloc (rtx x, rtx after)
4076 switch (GET_CODE (x))
4084 last = emit_insn_after_1 (x, after);
4087 #ifdef ENABLE_RTL_CHECKING
4094 last = make_jump_insn_raw (x);
4095 add_insn_after (last, after);
4102 /* Make an instruction with body X and code CALL_INSN
4103 and output it after the instruction AFTER. */
4106 emit_call_insn_after_noloc (rtx x, rtx after)
4112 switch (GET_CODE (x))
4120 last = emit_insn_after_1 (x, after);
4123 #ifdef ENABLE_RTL_CHECKING
4130 last = make_call_insn_raw (x);
4131 add_insn_after (last, after);
4138 /* Make an insn of code BARRIER
4139 and output it after the insn AFTER. */
4142 emit_barrier_after (rtx after)
4144 rtx insn = rtx_alloc (BARRIER);
4146 INSN_UID (insn) = cur_insn_uid++;
4148 add_insn_after (insn, after);
4152 /* Emit the label LABEL after the insn AFTER. */
4155 emit_label_after (rtx label, rtx after)
4157 /* This can be called twice for the same label
4158 as a result of the confusion that follows a syntax error!
4159 So make it harmless. */
4160 if (INSN_UID (label) == 0)
4162 INSN_UID (label) = cur_insn_uid++;
4163 add_insn_after (label, after);
4169 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4172 emit_note_after (int subtype, rtx after)
4174 rtx note = rtx_alloc (NOTE);
4175 INSN_UID (note) = cur_insn_uid++;
4176 #ifndef USE_MAPPED_LOCATION
4177 NOTE_SOURCE_FILE (note) = 0;
4179 NOTE_LINE_NUMBER (note) = subtype;
4180 BLOCK_FOR_INSN (note) = NULL;
4181 add_insn_after (note, after);
4185 /* Emit a copy of note ORIG after the insn AFTER. */
4188 emit_note_copy_after (rtx orig, rtx after)
4192 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4198 note = rtx_alloc (NOTE);
4199 INSN_UID (note) = cur_insn_uid++;
4200 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4201 NOTE_DATA (note) = NOTE_DATA (orig);
4202 BLOCK_FOR_INSN (note) = NULL;
4203 add_insn_after (note, after);
4207 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4209 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4211 rtx last = emit_insn_after_noloc (pattern, after);
4213 if (pattern == NULL_RTX || !loc)
4216 after = NEXT_INSN (after);
4219 if (active_insn_p (after) && !INSN_LOCATOR (after))
4220 INSN_LOCATOR (after) = loc;
4223 after = NEXT_INSN (after);
4228 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4230 emit_insn_after (rtx pattern, rtx after)
4233 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4235 return emit_insn_after_noloc (pattern, after);
4238 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4240 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4242 rtx last = emit_jump_insn_after_noloc (pattern, after);
4244 if (pattern == NULL_RTX || !loc)
4247 after = NEXT_INSN (after);
4250 if (active_insn_p (after) && !INSN_LOCATOR (after))
4251 INSN_LOCATOR (after) = loc;
4254 after = NEXT_INSN (after);
4259 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4261 emit_jump_insn_after (rtx pattern, rtx after)
4264 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4266 return emit_jump_insn_after_noloc (pattern, after);
4269 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4271 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4273 rtx last = emit_call_insn_after_noloc (pattern, after);
4275 if (pattern == NULL_RTX || !loc)
4278 after = NEXT_INSN (after);
4281 if (active_insn_p (after) && !INSN_LOCATOR (after))
4282 INSN_LOCATOR (after) = loc;
4285 after = NEXT_INSN (after);
4290 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4292 emit_call_insn_after (rtx pattern, rtx after)
4295 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4297 return emit_call_insn_after_noloc (pattern, after);
4300 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4302 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4304 rtx first = PREV_INSN (before);
4305 rtx last = emit_insn_before_noloc (pattern, before);
4307 if (pattern == NULL_RTX || !loc)
4310 first = NEXT_INSN (first);
4313 if (active_insn_p (first) && !INSN_LOCATOR (first))
4314 INSN_LOCATOR (first) = loc;
4317 first = NEXT_INSN (first);
4322 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4324 emit_insn_before (rtx pattern, rtx before)
4326 if (INSN_P (before))
4327 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4329 return emit_insn_before_noloc (pattern, before);
4332 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4334 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4336 rtx first = PREV_INSN (before);
4337 rtx last = emit_jump_insn_before_noloc (pattern, before);
4339 if (pattern == NULL_RTX)
4342 first = NEXT_INSN (first);
4345 if (active_insn_p (first) && !INSN_LOCATOR (first))
4346 INSN_LOCATOR (first) = loc;
4349 first = NEXT_INSN (first);
4354 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4356 emit_jump_insn_before (rtx pattern, rtx before)
4358 if (INSN_P (before))
4359 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4361 return emit_jump_insn_before_noloc (pattern, before);
4364 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4366 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4368 rtx first = PREV_INSN (before);
4369 rtx last = emit_call_insn_before_noloc (pattern, before);
4371 if (pattern == NULL_RTX)
4374 first = NEXT_INSN (first);
4377 if (active_insn_p (first) && !INSN_LOCATOR (first))
4378 INSN_LOCATOR (first) = loc;
4381 first = NEXT_INSN (first);
4386 /* Like emit_call_insn_before_noloc,
4387 but set INSN_LOCATOR according to BEFORE. */