/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "target.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx \
  (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
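
/* For illustration (assuming MAX_SAVED_CONST_INT is 64, as on typical
   hosts): because these CONST_INTs are shared, GEN_INT (0) == const0_rtx,
   and two independent GEN_INT (2) calls return the very same rtx, so
   small constants can be compared with pointer equality instead of
   comparing INTVALs.  */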
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being split by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace,
	       enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table, so that identical
   reg_attrs structures are always shared.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this point.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
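
/* Example (illustrative, assuming a 64-bit HOST_WIDE_INT):

     gen_int_mode (0x7f, QImode)  => (const_int 127)
     gen_int_mode (0xff, QImode)  => (const_int -1)

   trunc_int_for_mode sign-extends C from the high bit of MODE, so every
   bit pattern has exactly one canonical CONST_INT representation.  */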
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 agree), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
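
/* Worked example (illustrative, assuming 64-bit HOST_WIDE_INT and a
   128-bit TImode):

     immed_double_const (5, 0, TImode)   => (const_int 5)       [case 2]
     immed_double_const (-1, -1, TImode) => (const_int -1)      [case 2]
     immed_double_const (0, 1, TImode)   => a VOIDmode
					    (const_double 0 1)  [case 3]

   while immed_double_const (5, 0, SImode) reduces to
   gen_int_mode (5, SImode) by case 1.  */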
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
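
/* Typical use (illustrative; ADDR here is a hypothetical address rtx
   known to point at constant memory):

     rtx mem = gen_const_mem (SImode, addr);

   The resulting MEM is both MEM_READONLY_P and MEM_NOTRAP_P, so later
   passes may CSE or hoist the load freely.  */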
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
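
/* Some concrete cases (illustrative, for a 32-bit little-endian target
   with UNITS_PER_WORD == 4 and a pseudo inner register):

     (subreg:SI (reg:DI) 0)  -- valid: the low word
     (subreg:SI (reg:DI) 4)  -- valid: the high word
     (subreg:SI (reg:DI) 2)  -- invalid: offset not a multiple of 4
     (subreg:SI (reg:SF) 0)  -- valid: same-size float/int punning
     (subreg:HI (reg:DF) 0)  -- invalid: float-mode subregs must not
				change size  */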
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
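
/* For example (illustrative): with 4-byte SImode inside 8-byte DImode,
   byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target
   and 4 on a big-endian one, while the paradoxical direction
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */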
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
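
/* For instance (illustrative; pseudo numbers invented): with
   generating_concat_p set, gen_reg_rtx (SCmode) yields

     (concat:SC (reg:SF 101) (reg:SF 102))

   whereas gen_reg_rtx (DImode) yields a single (reg:DI 103), growing
   regno_reg_rtx and regno_pointer_align on demand.  */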
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}
/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}
/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
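
/* Worked example (illustrative, UNITS_PER_WORD == 4): for SImode within
   DImode the difference is 4 bytes, so subreg_lowpart_offset (SImode,
   DImode) is 0 on a little-endian target and 4 on a big-endian one,
   i.e. the byte address where the least significant word lives.  */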
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
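
/* Example (illustrative, 32-bit target where word_mode is SImode):
   for a DImode pseudo OP, operand_subword (op, 0, 1, DImode) yields
   (subreg:SI (reg:DI) 0), the low-order word when !WORDS_BIG_ENDIAN,
   and operand_subword (op, 1, 1, DImode) the other word; for an OP
   narrower than a word it returns 0.  */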
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	   || !CONST_INT_P (MEM_OFFSET (mem))
	   || (MAX (MEM_ALIGN (mem),
		    get_object_alignment (MEM_EXPR (mem), align))
	       < align))
	 return -1;
       else
	 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs; even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias;
  tree expr = NULL;
  rtx offset = NULL_RTX;
  rtx size = NULL_RTX;
  unsigned int align = BITS_PER_UNIT;
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;
  /* Default values from pre-existing memory attributes if present.  */
  if (MEM_ATTRS (ref))
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      expr = MEM_EXPR (ref);
      offset = MEM_OFFSET (ref);
      size = MEM_SIZE (ref);
      align = MEM_ALIGN (ref);
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else if (GET_MODE (ref) != BLKmode)
    {
      /* Respect mode size.  */
      size = GEN_INT (GET_MODE_SIZE (GET_MODE (ref)));
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (STRICT_ALIGNMENT && TYPE_P (t))
	align = GET_MODE_ALIGNMENT (GET_MODE (ref));
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      align = MIN (aoff, align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	align = MAX (align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (TREE_CODE (base) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
	base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else if (TREE_CODE (base) == INDIRECT_REF
	       || TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF
	       || TREE_CODE (base) == ARRAY_REF
	       || TREE_CODE (base) == ARRAY_RANGE_REF)
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}
      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }
  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
		     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Like set_mem_attributes_minus_bitpos, but with no outstanding bit
   position on T.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem),
				   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem),
		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
		     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}
2001 /* Return a memory reference like MEMREF, but with its mode changed
2002 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2003 nonzero, the memory address is forced to be valid.
2004 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2005 and caller is responsible for adjusting MEMREF base register. */
2008 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2009 int validate, int adjust)
2011 rtx addr = XEXP (memref, 0);
2013 rtx memoffset = MEM_OFFSET (memref);
2015 unsigned int memalign = MEM_ALIGN (memref);
2016 addr_space_t as = MEM_ADDR_SPACE (memref);
2017 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2020 /* If there are no changes, just return the original memory reference. */
2021 if (mode == GET_MODE (memref) && !offset
2022 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2025 /* ??? Prefer to create garbage instead of creating shared rtl.
2026 This may happen even if offset is nonzero -- consider
2027 (plus (plus reg reg) const_int) -- so do this always. */
2028 addr = copy_rtx (addr);
2030 /* Convert a possibly large offset to a signed value within the
2031 range of the target address space. */
2032 pbits = GET_MODE_BITSIZE (address_mode);
2033 if (HOST_BITS_PER_WIDE_INT > pbits)
2035 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2036 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2042 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2043 object, we can merge it into the LO_SUM. */
2044 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2046 && (unsigned HOST_WIDE_INT) offset
2047 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2048 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2049 plus_constant (XEXP (addr, 1), offset));
2051 addr = plus_constant (addr, offset);
2054 new_rtx = change_address_1 (memref, mode, addr, validate);
2056 /* If the address is a REG, change_address_1 rightfully returns memref,
2057 but this would destroy memref's MEM_ATTRS. */
2058 if (new_rtx == memref && offset != 0)
2059 new_rtx = copy_rtx (new_rtx);
2061 /* Compute the new values of the memory attributes due to this adjustment.
2062 We add the offsets and update the alignment. */
2064 memoffset = GEN_INT (offset + INTVAL (memoffset));
2066 /* Compute the new alignment by taking the MIN of the alignment and the
2067 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET is zero. */
2072 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2074 /* We can compute the size in a number of ways. */
2075 if (GET_MODE (new_rtx) != BLKmode)
2076 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2077 else if (MEM_SIZE (memref))
2078 size = plus_constant (MEM_SIZE (memref), -offset);
2080 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2081 memoffset, size, memalign, as,
2082 GET_MODE (new_rtx));
2084 /* At some point, we should validate that this offset is within the object,
2085 if all the appropriate values are known. */
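/* Illustrative sketch (not from the original source): adjust_address_1 is
   normally reached through the adjust_address/adjust_address_nv macros in
   expr.h, which fix VALIDATE and ADJUST.  Splitting a DImode MEM into two
   word-sized halves on a 32-bit target might look like:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   where MEM stands for a caller-provided DImode memory reference.  */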
2089 /* Return a memory reference like MEMREF, but with its mode changed
2090 to MODE and its address changed to ADDR, which is assumed to be
2091 MEMREF offset by OFFSET bytes. If VALIDATE is
2092 nonzero, the memory address is forced to be valid. */
2095 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2096 HOST_WIDE_INT offset, int validate)
2098 memref = change_address_1 (memref, VOIDmode, addr, validate);
2099 return adjust_address_1 (memref, mode, offset, validate, 0);
2102 /* Return a memory reference like MEMREF, but whose address is changed by
2103 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2104 known to be in OFFSET (possibly 1). */
2107 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2109 rtx new_rtx, addr = XEXP (memref, 0);
2110 addr_space_t as = MEM_ADDR_SPACE (memref);
2111 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2113 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2115 /* At this point we don't know _why_ the address is invalid. It
2116 could have secondary memory references, multiplies or anything.
2118 However, if we did go and rearrange things, we can wind up not
2119 being able to recognize the magic around pic_offset_table_rtx.
2120 This stuff is fragile, and is yet another example of why it is
2121 bad to expose PIC machinery too early. */
2122 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2123 && GET_CODE (addr) == PLUS
2124 && XEXP (addr, 0) == pic_offset_table_rtx)
2126 addr = force_reg (GET_MODE (addr), addr);
2127 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2130 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2131 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2133 /* If there are no changes, just return the original memory reference. */
2134 if (new_rtx == memref)
2137 /* Update the alignment to reflect the offset. Reset the offset, which we don't know anymore. */
2140 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2141 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2142 as, GET_MODE (new_rtx));
2146 /* Return a memory reference like MEMREF, but with its address changed to
2147 ADDR. The caller is asserting that the actual piece of memory pointed
2148 to is the same, just the form of the address is being changed, such as
2149 by putting something into a register. */
2152 replace_equiv_address (rtx memref, rtx addr)
2154 /* change_address_1 copies the memory attribute structure without change
2155 and that's exactly what we want here. */
2156 update_temp_slot_address (XEXP (memref, 0), addr);
2157 return change_address_1 (memref, VOIDmode, addr, 1);
2160 /* Likewise, but the reference is not required to be valid. */
2163 replace_equiv_address_nv (rtx memref, rtx addr)
2165 return change_address_1 (memref, VOIDmode, addr, 0);
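/* Illustrative sketch (not from the original source): the usual reason to
   call replace_equiv_address is to legitimize a complex address by forcing
   it into a register while keeping the MEM's attributes intact:

     rtx better = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   with MEM standing for a caller-provided memory reference.  The _nv
   variant below is the same but skips the address validation.  */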
2168 /* Return a memory reference like MEMREF, but with its mode widened to
2169 MODE and offset by OFFSET. This would be used by targets that e.g.
2170 cannot issue QImode memory operations and have to use SImode memory
2171 operations plus masking logic. */
2174 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2176 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2177 tree expr = MEM_EXPR (new_rtx);
2178 rtx memoffset = MEM_OFFSET (new_rtx);
2179 unsigned int size = GET_MODE_SIZE (mode);
2181 /* If there are no changes, just return the original memory reference. */
2182 if (new_rtx == memref)
2185 /* If we don't know what offset we were at within the expression, then
2186 we can't know if we've overstepped the bounds. */
2192 if (TREE_CODE (expr) == COMPONENT_REF)
2194 tree field = TREE_OPERAND (expr, 1);
2195 tree offset = component_ref_field_offset (expr);
2197 if (! DECL_SIZE_UNIT (field))
2203 /* Is the field at least as large as the access? If so, ok,
2204 otherwise strip back to the containing structure. */
2205 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2206 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2207 && INTVAL (memoffset) >= 0)
2210 if (! host_integerp (offset, 1))
2216 expr = TREE_OPERAND (expr, 0);
2218 = (GEN_INT (INTVAL (memoffset)
2219 + tree_low_cst (offset, 1)
2220 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2223 /* Similarly for the decl. */
2224 else if (DECL_P (expr)
2225 && DECL_SIZE_UNIT (expr)
2226 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2227 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2228 && (! memoffset || INTVAL (memoffset) >= 0))
2232 /* The widened memory access overflows the expression, which means
2233 that it could alias another expression. Zap it. */
2240 memoffset = NULL_RTX;
2242 /* The widened memory may alias other stuff, so zap the alias set. */
2243 /* ??? Maybe use get_alias_set on any remaining expression. */
2245 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2246 MEM_ALIGN (new_rtx),
2247 MEM_ADDR_SPACE (new_rtx), mode);
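/* Illustrative sketch (not from the original source): a target without
   byte loads could widen a QImode reference to a full word and mask the
   result; the masking insns themselves are the caller's job:

     rtx wide = widen_memory_access (mem, SImode, 0);
     rtx byte = expand_binop (SImode, and_optab, wide, GEN_INT (0xff),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);

   where MEM is a hypothetical QImode memory reference.  */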
2252 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2253 static GTY(()) tree spill_slot_decl;
2256 get_spill_slot_decl (bool force_build_p)
2258 tree d = spill_slot_decl;
2261 if (d || !force_build_p)
2264 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2265 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2266 DECL_ARTIFICIAL (d) = 1;
2267 DECL_IGNORED_P (d) = 1;
2269 spill_slot_decl = d;
2271 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2272 MEM_NOTRAP_P (rd) = 1;
2273 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2274 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2275 SET_DECL_RTL (d, rd);
2280 /* Given MEM, a result from assign_stack_local, fill in the memory
2281 attributes as appropriate for a register allocator spill slot.
2282 These slots are not aliasable by other memory. We arrange for
2283 them all to use a single MEM_EXPR, so that the aliasing code can
2284 work properly in the case of shared spill slots. */
2287 set_mem_attrs_for_spill (rtx mem)
2289 alias_set_type alias;
2293 expr = get_spill_slot_decl (true);
2294 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2296 /* We expect the incoming memory to be of the form:
2297 (mem:MODE (plus (reg sfp) (const_int offset)))
2298 with perhaps the plus missing for offset = 0. */
2299 addr = XEXP (mem, 0);
2300 offset = const0_rtx;
2301 if (GET_CODE (addr) == PLUS
2302 && CONST_INT_P (XEXP (addr, 1)))
2303 offset = XEXP (addr, 1);
2305 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2306 MEM_SIZE (mem), MEM_ALIGN (mem),
2307 ADDR_SPACE_GENERIC, GET_MODE (mem));
2308 MEM_NOTRAP_P (mem) = 1;
2311 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2314 gen_label_rtx (void)
2316 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2317 NULL, label_num++, NULL);
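/* Illustrative sketch (not from the original source): a label made here is
   typically branched to first and bound to its position later:

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_jump (label));
     emit_barrier ();
     ... emit the intervening code ...
     emit_label (label);
*/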
2320 /* For procedure integration. */
2322 /* Install new pointers to the first and last insns in the chain.
2323 Also, set cur_insn_uid to one higher than the last in use.
2324 Used for an inline-procedure after copying the insn chain. */
2327 set_new_first_and_last_insn (rtx first, rtx last)
2331 set_first_insn (first);
2332 set_last_insn (last);
2335 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2337 int debug_count = 0;
2339 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2340 cur_debug_insn_uid = 0;
2342 for (insn = first; insn; insn = NEXT_INSN (insn))
2343 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2344 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2347 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2348 if (DEBUG_INSN_P (insn))
2353 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2355 cur_debug_insn_uid++;
2358 for (insn = first; insn; insn = NEXT_INSN (insn))
2359 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2364 /* Go through all the RTL insn bodies and copy any invalid shared
2365 structure. This routine should only be called once. */
2368 unshare_all_rtl_1 (rtx insn)
2370 /* Unshare just about everything else. */
2371 unshare_all_rtl_in_chain (insn);
2373 /* Make sure the addresses of stack slots found outside the insn chain
2374 (such as, in DECL_RTL of a variable) are not shared
2375 with the insn chain.
2377 This special care is necessary when the stack slot MEM does not
2378 actually appear in the insn chain. If it does appear, its address
2379 is unshared from all else at that point. */
2380 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2383 /* Go through all the RTL insn bodies and copy any invalid shared
2384 structure, again. This is a fairly expensive thing to do so it
2385 should be done sparingly. */
2388 unshare_all_rtl_again (rtx insn)
2393 for (p = insn; p; p = NEXT_INSN (p))
2396 reset_used_flags (PATTERN (p));
2397 reset_used_flags (REG_NOTES (p));
2400 /* Make sure that virtual stack slots are not shared. */
2401 set_used_decls (DECL_INITIAL (cfun->decl));
2403 /* Make sure that virtual parameters are not shared. */
2404 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2405 set_used_flags (DECL_RTL (decl));
2407 reset_used_flags (stack_slot_list);
2409 unshare_all_rtl_1 (insn);
2413 unshare_all_rtl (void)
2415 unshare_all_rtl_1 (get_insns ());
2419 struct rtl_opt_pass pass_unshare_all_rtl =
2423 "unshare", /* name */
2425 unshare_all_rtl, /* execute */
2428 0, /* static_pass_number */
2429 TV_NONE, /* tv_id */
2430 0, /* properties_required */
2431 0, /* properties_provided */
2432 0, /* properties_destroyed */
2433 0, /* todo_flags_start */
2434 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2439 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2440 Recursively does the same for subexpressions. */
2443 verify_rtx_sharing (rtx orig, rtx insn)
2448 const char *format_ptr;
2453 code = GET_CODE (x);
2455 /* These types may be freely shared. */
2473 /* SCRATCHes must be shared because they represent distinct values. */
2475 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2480 if (shared_const_p (orig))
2485 /* A MEM is allowed to be shared if its address is constant. */
2486 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2487 || reload_completed || reload_in_progress)
2496 /* This rtx may not be shared. If it has already been seen,
2497 replace it with a copy of itself. */
2498 #ifdef ENABLE_CHECKING
2499 if (RTX_FLAG (x, used))
2501 error ("invalid rtl sharing found in the insn");
2503 error ("shared rtx");
2505 internal_error ("internal consistency failure");
2508 gcc_assert (!RTX_FLAG (x, used));
2510 RTX_FLAG (x, used) = 1;
2512 /* Now scan the subexpressions recursively. */
2514 format_ptr = GET_RTX_FORMAT (code);
2516 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2518 switch (*format_ptr++)
2521 verify_rtx_sharing (XEXP (x, i), insn);
2525 if (XVEC (x, i) != NULL)
2528 int len = XVECLEN (x, i);
2530 for (j = 0; j < len; j++)
2532 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2534 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2535 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2537 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2539 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2548 /* Go through all the RTL insn bodies and check that there is no unexpected
2549 sharing between the subexpressions. */
2552 verify_rtl_sharing (void)
2556 timevar_push (TV_VERIFY_RTL_SHARING);
2558 for (p = get_insns (); p; p = NEXT_INSN (p))
2561 reset_used_flags (PATTERN (p));
2562 reset_used_flags (REG_NOTES (p));
2563 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2566 rtx q, sequence = PATTERN (p);
2568 for (i = 0; i < XVECLEN (sequence, 0); i++)
2570 q = XVECEXP (sequence, 0, i);
2571 gcc_assert (INSN_P (q));
2572 reset_used_flags (PATTERN (q));
2573 reset_used_flags (REG_NOTES (q));
2578 for (p = get_insns (); p; p = NEXT_INSN (p))
2581 verify_rtx_sharing (PATTERN (p), p);
2582 verify_rtx_sharing (REG_NOTES (p), p);
2585 timevar_pop (TV_VERIFY_RTL_SHARING);
2588 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2589 Assumes the mark bits are cleared at entry. */
2592 unshare_all_rtl_in_chain (rtx insn)
2594 for (; insn; insn = NEXT_INSN (insn))
2597 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2598 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2602 /* Go through all virtual stack slots of a function and mark them as
2603 shared. We never replace the DECL_RTLs themselves with a copy,
2604 but expressions mentioned in a DECL_RTL cannot be shared with
2605 expressions in the instruction stream.
2607 Note that reload may convert pseudo registers into memories in-place.
2608 Pseudo registers are always shared, but MEMs never are. Thus if we
2609 reset the used flags on MEMs in the instruction stream, we must set
2610 them again on MEMs that appear in DECL_RTLs. */
2613 set_used_decls (tree blk)
2618 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2619 if (DECL_RTL_SET_P (t))
2620 set_used_flags (DECL_RTL (t));
2622 /* Now process sub-blocks. */
2623 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2627 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2628 Recursively does the same for subexpressions. Uses
2629 copy_rtx_if_shared_1 to reduce stack space. */
2632 copy_rtx_if_shared (rtx orig)
2634 copy_rtx_if_shared_1 (&orig);
2638 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2639 use. Recursively does the same for subexpressions. */
2642 copy_rtx_if_shared_1 (rtx *orig1)
2648 const char *format_ptr;
2652 /* Repeat is used to turn tail-recursion into iteration. */
2659 code = GET_CODE (x);
2661 /* These types may be freely shared. */
2678 /* SCRATCHes must be shared because they represent distinct values. */
2681 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2686 if (shared_const_p (x))
2696 /* The chain of insns is not being copied. */
2703 /* This rtx may not be shared. If it has already been seen,
2704 replace it with a copy of itself. */
2706 if (RTX_FLAG (x, used))
2708 x = shallow_copy_rtx (x);
2711 RTX_FLAG (x, used) = 1;
2713 /* Now scan the subexpressions recursively.
2714 We can store any replaced subexpressions directly into X
2715 since we know X is not shared! Any vectors in X
2716 must be copied if X was copied. */
2718 format_ptr = GET_RTX_FORMAT (code);
2719 length = GET_RTX_LENGTH (code);
2722 for (i = 0; i < length; i++)
2724 switch (*format_ptr++)
2728 copy_rtx_if_shared_1 (last_ptr);
2729 last_ptr = &XEXP (x, i);
2733 if (XVEC (x, i) != NULL)
2736 int len = XVECLEN (x, i);
2738 /* Copy the vector iff I copied the rtx and the length is nonzero. */
2740 if (copied && len > 0)
2741 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2743 /* Call recursively on all inside the vector. */
2744 for (j = 0; j < len; j++)
2747 copy_rtx_if_shared_1 (last_ptr);
2748 last_ptr = &XVECEXP (x, i, j);
2763 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2766 mark_used_flags (rtx x, int flag)
2770 const char *format_ptr;
2773 /* Repeat is used to turn tail-recursion into iteration. */
2778 code = GET_CODE (x);
2780 /* These types may be freely shared so we needn't do any resetting for them. */
2805 /* The chain of insns is not being copied. */
2812 RTX_FLAG (x, used) = flag;
2814 format_ptr = GET_RTX_FORMAT (code);
2815 length = GET_RTX_LENGTH (code);
2817 for (i = 0; i < length; i++)
2819 switch (*format_ptr++)
2827 mark_used_flags (XEXP (x, i), flag);
2831 for (j = 0; j < XVECLEN (x, i); j++)
2832 mark_used_flags (XVECEXP (x, i, j), flag);
2838 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2839 to look for shared sub-parts. */
2842 reset_used_flags (rtx x)
2844 mark_used_flags (x, 0);
2847 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2848 to look for shared sub-parts. */
2851 set_used_flags (rtx x)
2853 mark_used_flags (x, 1);
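/* Illustrative sketch (not from the original source): the protocol implied
   by the two helpers above mirrors unshare_all_rtl_again -- clear the used
   bits over the insn chain, re-mark anything that must stay shared (here a
   hypothetical DECL's DECL_RTL), then copy whatever is still marked:

     rtx insn;
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         reset_used_flags (PATTERN (insn));
     set_used_flags (DECL_RTL (decl));
     unshare_all_rtl_in_chain (get_insns ());
*/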
2856 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2857 Return X or the rtx for the pseudo reg the value of X was copied into.
2858 OTHER must be valid as a SET_DEST. */
2861 make_safe_from (rtx x, rtx other)
2864 switch (GET_CODE (other))
2867 other = SUBREG_REG (other);
2869 case STRICT_LOW_PART:
2872 other = XEXP (other, 0);
2881 && GET_CODE (x) != SUBREG)
2883 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2884 || reg_mentioned_p (other, x))))
2886 rtx temp = gen_reg_rtx (GET_MODE (x));
2887 emit_move_insn (temp, x);
2893 /* Emission of insns (adding them to the doubly-linked list). */
2895 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2898 get_last_insn_anywhere (void)
2900 struct sequence_stack *stack;
2901 if (get_last_insn ())
2902 return get_last_insn ();
2903 for (stack = seq_stack; stack; stack = stack->next)
2904 if (stack->last != 0)
2909 /* Return the first nonnote insn emitted in current sequence or current
2910 function. This routine looks inside SEQUENCEs. */
2913 get_first_nonnote_insn (void)
2915 rtx insn = get_insns ();
2920 for (insn = next_insn (insn);
2921 insn && NOTE_P (insn);
2922 insn = next_insn (insn))
2926 if (NONJUMP_INSN_P (insn)
2927 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2928 insn = XVECEXP (PATTERN (insn), 0, 0);
2935 /* Return the last nonnote insn emitted in current sequence or current
2936 function. This routine looks inside SEQUENCEs. */
2939 get_last_nonnote_insn (void)
2941 rtx insn = get_last_insn ();
2946 for (insn = previous_insn (insn);
2947 insn && NOTE_P (insn);
2948 insn = previous_insn (insn))
2952 if (NONJUMP_INSN_P (insn)
2953 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2954 insn = XVECEXP (PATTERN (insn), 0,
2955 XVECLEN (PATTERN (insn), 0) - 1);
2962 /* Return the number of actual (non-debug) insns emitted in this function. */
2966 get_max_insn_count (void)
2968 int n = cur_insn_uid;
2970 /* The table size must be stable across -g, to avoid codegen
2971 differences due to debug insns, and not be affected by
2972 -fmin-insn-uid, to avoid excessive table size and to simplify
2973 debugging of -fcompare-debug failures. */
2974 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2975 n -= cur_debug_insn_uid;
2977 n -= MIN_NONDEBUG_INSN_UID;
2983 /* Return the next insn. If it is a SEQUENCE, return the first insn of the sequence. */
2987 next_insn (rtx insn)
2991 insn = NEXT_INSN (insn);
2992 if (insn && NONJUMP_INSN_P (insn)
2993 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2994 insn = XVECEXP (PATTERN (insn), 0, 0);
3000 /* Return the previous insn. If it is a SEQUENCE, return the last insn of the sequence. */
3004 previous_insn (rtx insn)
3008 insn = PREV_INSN (insn);
3009 if (insn && NONJUMP_INSN_P (insn)
3010 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3011 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3017 /* Return the next insn after INSN that is not a NOTE. This routine does not
3018 look inside SEQUENCEs. */
3021 next_nonnote_insn (rtx insn)
3025 insn = NEXT_INSN (insn);
3026 if (insn == 0 || !NOTE_P (insn))
3033 /* Return the next insn after INSN that is not a NOTE, but stop the
3034 search before we enter another basic block. This routine does not
3035 look inside SEQUENCEs. */
3038 next_nonnote_insn_bb (rtx insn)
3042 insn = NEXT_INSN (insn);
3043 if (insn == 0 || !NOTE_P (insn))
3045 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3052 /* Return the previous insn before INSN that is not a NOTE. This routine does
3053 not look inside SEQUENCEs. */
3056 prev_nonnote_insn (rtx insn)
3060 insn = PREV_INSN (insn);
3061 if (insn == 0 || !NOTE_P (insn))
3068 /* Return the previous insn before INSN that is not a NOTE, but stop
3069 the search before we enter another basic block. This routine does
3070 not look inside SEQUENCEs. */
3073 prev_nonnote_insn_bb (rtx insn)
3077 insn = PREV_INSN (insn);
3078 if (insn == 0 || !NOTE_P (insn))
3080 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3087 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3088 routine does not look inside SEQUENCEs. */
3091 next_nondebug_insn (rtx insn)
3095 insn = NEXT_INSN (insn);
3096 if (insn == 0 || !DEBUG_INSN_P (insn))
3103 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3104 This routine does not look inside SEQUENCEs. */
3107 prev_nondebug_insn (rtx insn)
3111 insn = PREV_INSN (insn);
3112 if (insn == 0 || !DEBUG_INSN_P (insn))
3119 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3120 This routine does not look inside SEQUENCEs. */
3123 next_nonnote_nondebug_insn (rtx insn)
3127 insn = NEXT_INSN (insn);
3128 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3135 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3136 This routine does not look inside SEQUENCEs. */
3139 prev_nonnote_nondebug_insn (rtx insn)
3143 insn = PREV_INSN (insn);
3144 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3151 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3152 or 0, if there is none. This routine does not look inside SEQUENCEs. */
3156 next_real_insn (rtx insn)
3160 insn = NEXT_INSN (insn);
3161 if (insn == 0 || INSN_P (insn))
3168 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3169 or 0, if there is none. This routine does not look inside SEQUENCEs. */
3173 prev_real_insn (rtx insn)
3177 insn = PREV_INSN (insn);
3178 if (insn == 0 || INSN_P (insn))
3185 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3186 This routine does not look inside SEQUENCEs. */
3189 last_call_insn (void)
3193 for (insn = get_last_insn ();
3194 insn && !CALL_P (insn);
3195 insn = PREV_INSN (insn))
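/* Illustrative sketch (not from the original source): the walkers above
   compose into the usual whole-function scan, skipping notes and debug
   insns:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         ... process INSN ...
*/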
3201 /* Find the next insn after INSN that really does something. This routine
3202 does not look inside SEQUENCEs. After reload this also skips over
3203 standalone USE and CLOBBER insns. */
3206 active_insn_p (const_rtx insn)
3208 return (CALL_P (insn) || JUMP_P (insn)
3209 || (NONJUMP_INSN_P (insn)
3210 && (! reload_completed
3211 || (GET_CODE (PATTERN (insn)) != USE
3212 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3216 next_active_insn (rtx insn)
3220 insn = NEXT_INSN (insn);
3221 if (insn == 0 || active_insn_p (insn))
3228 /* Find the last insn before INSN that really does something. This routine
3229 does not look inside SEQUENCEs. After reload this also skips over
3230 standalone USE and CLOBBER insns. */
3233 prev_active_insn (rtx insn)
3237 insn = PREV_INSN (insn);
3238 if (insn == 0 || active_insn_p (insn))
3245 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3248 next_label (rtx insn)
3252 insn = NEXT_INSN (insn);
3253 if (insn == 0 || LABEL_P (insn))
3260 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3263 prev_label (rtx insn)
3267 insn = PREV_INSN (insn);
3268 if (insn == 0 || LABEL_P (insn))
3275 /* Return the last label to mark the same position as LABEL. Return null
3276 if LABEL itself is null. */
3279 skip_consecutive_labels (rtx label)
3283 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3291 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3292 and REG_CC_USER notes so we can find it. */
3295 link_cc0_insns (rtx insn)
3297 rtx user = next_nonnote_insn (insn);
3299 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3300 user = XVECEXP (PATTERN (user), 0, 0);
3302 add_reg_note (user, REG_CC_SETTER, insn);
3303 add_reg_note (insn, REG_CC_USER, user);
3306 /* Return the next insn that uses CC0 after INSN, which is assumed to
3307 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3308 applied to the result of this function should yield INSN).
3310 Normally, this is simply the next insn. However, if a REG_CC_USER note
3311 is present, it contains the insn that uses CC0.
3313 Return 0 if we can't find the insn. */
3316 next_cc0_user (rtx insn)
3318 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3321 return XEXP (note, 0);
3323 insn = next_nonnote_insn (insn);
3324 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3325 insn = XVECEXP (PATTERN (insn), 0, 0);
3327 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3333 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3334 note, it is the previous insn. */
3337 prev_cc0_setter (rtx insn)
3339 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3342 return XEXP (note, 0);
3344 insn = prev_nonnote_insn (insn);
3345 gcc_assert (sets_cc0_p (PATTERN (insn)));
3352 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3355 find_auto_inc (rtx *xp, void *data)
3358 rtx reg = (rtx) data;
3360 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3363 switch (GET_CODE (x))
3371 if (rtx_equal_p (reg, XEXP (x, 0)))
3382 /* Increment the label uses for all labels present in rtx. */
3385 mark_label_nuses (rtx x)
3391 code = GET_CODE (x);
3392 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3393 LABEL_NUSES (XEXP (x, 0))++;
3395 fmt = GET_RTX_FORMAT (code);
3396 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3399 mark_label_nuses (XEXP (x, i));
3400 else if (fmt[i] == 'E')
3401 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3402 mark_label_nuses (XVECEXP (x, i, j));
3407 /* Try splitting insns that can be split for better scheduling.
3408 PAT is the pattern which might split.
3409 TRIAL is the insn providing PAT.
3410 LAST is nonzero if we should return the last insn of the sequence produced.
3412 If this routine succeeds in splitting, it returns the first or last
3413 replacement insn depending on the value of LAST. Otherwise, it
3414 returns TRIAL. If the insn to be returned can be split, it will be. */
3417 try_split (rtx pat, rtx trial, int last)
3419 rtx before = PREV_INSN (trial);
3420 rtx after = NEXT_INSN (trial);
3421 int has_barrier = 0;
3424 rtx insn_last, insn;
3427 /* We're not good at redistributing frame information. */
3428 if (RTX_FRAME_RELATED_P (trial))
3431 if (any_condjump_p (trial)
3432 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3433 split_branch_probability = INTVAL (XEXP (note, 0));
3434 probability = split_branch_probability;
3436 seq = split_insns (pat, trial);
3438 split_branch_probability = -1;
3440 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3441 We may need to handle this specially. */
3442 if (after && BARRIER_P (after))
3445 after = NEXT_INSN (after);
3451 /* Avoid infinite loop if any insn of the result matches
3452 the original pattern. */
3456 if (INSN_P (insn_last)
3457 && rtx_equal_p (PATTERN (insn_last), pat))
3459 if (!NEXT_INSN (insn_last))
3461 insn_last = NEXT_INSN (insn_last);
3464 /* We will be adding the new sequence to the function. The splitters
3465 may have introduced invalid RTL sharing, so unshare the sequence now. */
3466 unshare_all_rtl_in_chain (seq);
3469 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3473 mark_jump_label (PATTERN (insn), insn, 0);
3475 if (probability != -1
3476 && any_condjump_p (insn)
3477 && !find_reg_note (insn, REG_BR_PROB, 0))
3479 /* We can preserve the REG_BR_PROB notes only if exactly
3480 one jump is created, otherwise the machine description
3481 is responsible for this step using the
3482 split_branch_probability variable. */
3483 gcc_assert (njumps == 1);
3484 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3489 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3490 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3493 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3496 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3499 *p = CALL_INSN_FUNCTION_USAGE (trial);
3500 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3502 /* Update the debug information for the CALL_INSN. */
3503 if (flag_enable_icf_debug)
3504 (*debug_hooks->copy_call_info) (trial, insn);
3508 /* Copy notes, particularly those related to the CFG. */
3509 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3511 switch (REG_NOTE_KIND (note))
3514 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3519 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3522 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3526 case REG_NON_LOCAL_GOTO:
3527 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3530 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3536 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3538 rtx reg = XEXP (note, 0);
3539 if (!FIND_REG_INC_NOTE (insn, reg)
3540 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3541 add_reg_note (insn, REG_INC, reg);
3551 /* If there are LABELS inside the split insns, increment the
3552 usage count so we don't delete the label. */
3556 while (insn != NULL_RTX)
3558 /* JUMP_P insns have already been "marked" above. */
3559 if (NONJUMP_INSN_P (insn))
3560 mark_label_nuses (PATTERN (insn));
3562 insn = PREV_INSN (insn);
3566 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3568 delete_insn (trial);
3570 emit_barrier_after (tem);
3572 /* Recursively call try_split for each new insn created; by the
3573 time control returns here that insn will be fully split, so
3574 set LAST and continue from the insn after the one returned.
3575 We can't use next_active_insn here since AFTER may be a note.
3576 Ignore deleted insns, which can occur if not optimizing. */
3577 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3578 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3579 tem = try_split (PATTERN (tem), tem, 1);
3581 /* Return either the first or the last insn, depending on which was requested. */
3584 ? (after ? PREV_INSN (after) : get_last_insn ())
3585 : NEXT_INSN (before);
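/* Illustrative sketch (not from the original source): a typical caller
   hands try_split an insn's own pattern and replaces the insn with
   whatever comes back:

     rtx first = try_split (PATTERN (insn), insn, 0);
     if (first != insn)
       ... INSN was split; FIRST heads the replacement sequence ...
*/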
3588 /* Make and return an INSN rtx, initializing all its slots.
3589 Store PATTERN in the pattern slot. */
3592 make_insn_raw (rtx pattern)
3596 insn = rtx_alloc (INSN);
3598 INSN_UID (insn) = cur_insn_uid++;
3599 PATTERN (insn) = pattern;
3600 INSN_CODE (insn) = -1;
3601 REG_NOTES (insn) = NULL;
3602 INSN_LOCATOR (insn) = curr_insn_locator ();
3603 BLOCK_FOR_INSN (insn) = NULL;
3605 #ifdef ENABLE_RTL_CHECKING
3608 && (returnjump_p (insn)
3609 || (GET_CODE (insn) == SET
3610 && SET_DEST (insn) == pc_rtx)))
3612 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3620 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3623 make_debug_insn_raw (rtx pattern)
3627 insn = rtx_alloc (DEBUG_INSN);
3628 INSN_UID (insn) = cur_debug_insn_uid++;
3629 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3630 INSN_UID (insn) = cur_insn_uid++;
3632 PATTERN (insn) = pattern;
3633 INSN_CODE (insn) = -1;
3634 REG_NOTES (insn) = NULL;
3635 INSN_LOCATOR (insn) = curr_insn_locator ();
3636 BLOCK_FOR_INSN (insn) = NULL;
3641 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3644 make_jump_insn_raw (rtx pattern)
3648 insn = rtx_alloc (JUMP_INSN);
3649 INSN_UID (insn) = cur_insn_uid++;
3651 PATTERN (insn) = pattern;
3652 INSN_CODE (insn) = -1;
3653 REG_NOTES (insn) = NULL;
3654 JUMP_LABEL (insn) = NULL;
3655 INSN_LOCATOR (insn) = curr_insn_locator ();
3656 BLOCK_FOR_INSN (insn) = NULL;
3661 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3664 make_call_insn_raw (rtx pattern)
3668 insn = rtx_alloc (CALL_INSN);
3669 INSN_UID (insn) = cur_insn_uid++;
3671 PATTERN (insn) = pattern;
3672 INSN_CODE (insn) = -1;
3673 REG_NOTES (insn) = NULL;
3674 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3675 INSN_LOCATOR (insn) = curr_insn_locator ();
3676 BLOCK_FOR_INSN (insn) = NULL;
3681 /* Add INSN to the end of the doubly-linked list.
3682 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3687 PREV_INSN (insn) = get_last_insn();
3688 NEXT_INSN (insn) = 0;
3690 if (NULL != get_last_insn())
3691 NEXT_INSN (get_last_insn ()) = insn;
3693 if (NULL == get_insns ())
3694 set_first_insn (insn);
3696 set_last_insn (insn);
3699 /* Add INSN into the doubly-linked list after insn AFTER. This and
3700 the next should be the only functions called to insert an insn once
3701 delay slots have been filled since only they know how to update a SEQUENCE. */
3705 add_insn_after (rtx insn, rtx after, basic_block bb)
3707 rtx next = NEXT_INSN (after);
3709 gcc_assert (!optimize || !INSN_DELETED_P (after));
3711 NEXT_INSN (insn) = next;
3712 PREV_INSN (insn) = after;
3716 PREV_INSN (next) = insn;
3717 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3718 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3720 else if (get_last_insn () == after)
3721 set_last_insn (insn);
3724 struct sequence_stack *stack = seq_stack;
3725 /* Scan all pending sequences too. */
3726 for (; stack; stack = stack->next)
3727 if (after == stack->last)
3736 if (!BARRIER_P (after)
3737 && !BARRIER_P (insn)
3738 && (bb = BLOCK_FOR_INSN (after)))
3740 set_block_for_insn (insn, bb);
3742 df_insn_rescan (insn);
3743 /* Should not happen as first in the BB is always
3744 either NOTE or LABEL. */
3745 if (BB_END (bb) == after
3746 /* Avoid clobbering of structure when creating new BB. */
3747 && !BARRIER_P (insn)
3748 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3752 NEXT_INSN (after) = insn;
3753 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3755 rtx sequence = PATTERN (after);
3756 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3760 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3761 the previous should be the only functions called to insert an insn
3762 once delay slots have been filled since only they know how to
3763 update a SEQUENCE. If BB is NULL, an attempt is made to infer the basic block from BEFORE. */
3767 add_insn_before (rtx insn, rtx before, basic_block bb)
3769 rtx prev = PREV_INSN (before);
3771 gcc_assert (!optimize || !INSN_DELETED_P (before));
3773 PREV_INSN (insn) = prev;
3774 NEXT_INSN (insn) = before;
3778 NEXT_INSN (prev) = insn;
3779 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3781 rtx sequence = PATTERN (prev);
3782 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3785 else if (get_insns () == before)
3786 set_first_insn (insn);
3789 struct sequence_stack *stack = seq_stack;
3790 /* Scan all pending sequences too. */
3791 for (; stack; stack = stack->next)
3792 if (before == stack->first)
3794 stack->first = insn;
3802 && !BARRIER_P (before)
3803 && !BARRIER_P (insn))
3804 bb = BLOCK_FOR_INSN (before);
3808 set_block_for_insn (insn, bb);
3810 df_insn_rescan (insn);
3811 /* Should not happen as first in the BB is always either NOTE or LABEL. */
3813 gcc_assert (BB_HEAD (bb) != insn
3814 /* Avoid clobbering of structure when creating new BB. */
3816 || NOTE_INSN_BASIC_BLOCK_P (insn));
3819 PREV_INSN (before) = insn;
3820 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3821 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3825 /* Replace insn with a deleted instruction note. */
3828 set_insn_deleted (rtx insn)
3830 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3831 PUT_CODE (insn, NOTE);
3832 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3836 /* Remove an insn from its doubly-linked list. This function knows how
3837 to handle sequences. */
3839 remove_insn (rtx insn)
3841 rtx next = NEXT_INSN (insn);
3842 rtx prev = PREV_INSN (insn);
3845 /* Later in the code, the block will be marked dirty. */
3846 df_insn_delete (NULL, INSN_UID (insn));
3850 NEXT_INSN (prev) = next;
3851 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3853 rtx sequence = PATTERN (prev);
3854 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3857 else if (get_insns () == insn)
3860 PREV_INSN (next) = NULL;
3861 set_first_insn (next);
3865 struct sequence_stack *stack = seq_stack;
3866 /* Scan all pending sequences too. */
3867 for (; stack; stack = stack->next)
3868 if (insn == stack->first)
3870 stack->first = next;
3879 PREV_INSN (next) = prev;
3880 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3881 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3883 else if (get_last_insn () == insn)
3884 set_last_insn (prev);
3887 struct sequence_stack *stack = seq_stack;
3888 /* Scan all pending sequences too. */
3889 for (; stack; stack = stack->next)
3890 if (insn == stack->last)
3898 if (!BARRIER_P (insn)
3899 && (bb = BLOCK_FOR_INSN (insn)))
3901 if (NONDEBUG_INSN_P (insn))
3902 df_set_bb_dirty (bb);
3903 if (BB_HEAD (bb) == insn)
3905 /* Never ever delete the basic block note without deleting the whole basic block. */
3907 gcc_assert (!NOTE_P (insn));
3908 BB_HEAD (bb) = next;
3910 if (BB_END (bb) == insn)
3915 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3918 add_function_usage_to (rtx call_insn, rtx call_fusage)
3920 gcc_assert (call_insn && CALL_P (call_insn));
3922 /* Put the register usage information on the CALL. If there is already
3923 some usage information, put ours at the end. */
3924 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3928 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3929 link = XEXP (link, 1))
3932 XEXP (link, 1) = call_fusage;
3935 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3938 /* Delete all insns made since FROM.
3939 FROM becomes the new last instruction. */
3942 delete_insns_since (rtx from)
3947 NEXT_INSN (from) = 0;
3948 set_last_insn (from);
3951 /* This function is deprecated, please use sequences instead.
3953 Move a consecutive bunch of insns to a different place in the chain.
3954 The insns to be moved are those between FROM and TO.
3955 They are moved to a new position after the insn AFTER.
3956 AFTER must not be FROM or TO or any insn in between.
3958 This function does not know about SEQUENCEs and hence should not be
3959 called after delay-slot filling has been done. */
3962 reorder_insns_nobb (rtx from, rtx to, rtx after)
3964 #ifdef ENABLE_CHECKING
3966 for (x = from; x != to; x = NEXT_INSN (x))
3967 gcc_assert (after != x);
3968 gcc_assert (after != to);
3971 /* Splice this bunch out of where it is now. */
3972 if (PREV_INSN (from))
3973 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3975 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3976 if (get_last_insn () == to)
3977 set_last_insn (PREV_INSN (from));
3978 if (get_insns () == from)
3979 set_first_insn (NEXT_INSN (to));
3981 /* Make the new neighbors point to it and it to them. */
3982 if (NEXT_INSN (after))
3983 PREV_INSN (NEXT_INSN (after)) = to;
3985 NEXT_INSN (to) = NEXT_INSN (after);
3986 PREV_INSN (from) = after;
3987 NEXT_INSN (after) = from;
3988 if (after == get_last_insn())
3992 /* Same as function above, but take care to update BB boundaries. */
3994 reorder_insns (rtx from, rtx to, rtx after)
3996 rtx prev = PREV_INSN (from);
3997 basic_block bb, bb2;
3999 reorder_insns_nobb (from, to, after);
4001 if (!BARRIER_P (after)
4002 && (bb = BLOCK_FOR_INSN (after)))
4005 df_set_bb_dirty (bb);
4007 if (!BARRIER_P (from)
4008 && (bb2 = BLOCK_FOR_INSN (from)))
4010 if (BB_END (bb2) == to)
4011 BB_END (bb2) = prev;
4012 df_set_bb_dirty (bb2);
4015 if (BB_END (bb) == after)
4018 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4020 df_insn_change_bb (x, bb);
4025 /* Emit insn(s) of given code and pattern
4026 at a specified place within the doubly-linked list.
4028 All of the emit_foo global entry points accept an object
4029 X which is either an insn list or a PATTERN of a single insn.
4032 There are thus a few canonical ways to generate code and
4033 emit it at a specific place in the instruction stream. For
4034 example, consider the instruction named SPOT and the fact that
4035 we would like to emit some instructions before SPOT. We might do it like this:
4039 start_sequence (); ... emit the new instructions ...
4040 insns_head = get_insns (); end_sequence ();
4043 emit_insn_before (insns_head, SPOT);
4045 It used to be common to generate SEQUENCE rtl instead, but that
4046 is a relic of the past which no longer occurs. The reason is that
4047 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4048 generated would almost certainly die right after it was created. */
4050 /* Make X be output before the instruction BEFORE. */
4053 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4058 gcc_assert (before);
4063 switch (GET_CODE (x))
4075 rtx next = NEXT_INSN (insn);
4076 add_insn_before (insn, before, bb);
4082 #ifdef ENABLE_RTL_CHECKING
4089 last = make_insn_raw (x);
4090 add_insn_before (last, before, bb);
4097 /* Make an instruction with body X and code JUMP_INSN
4098 and output it before the instruction BEFORE. */
4101 emit_jump_insn_before_noloc (rtx x, rtx before)
4103 rtx insn, last = NULL_RTX;
4105 gcc_assert (before);
4107 switch (GET_CODE (x))
4119 rtx next = NEXT_INSN (insn);
4120 add_insn_before (insn, before, NULL);
4126 #ifdef ENABLE_RTL_CHECKING
4133 last = make_jump_insn_raw (x);
4134 add_insn_before (last, before, NULL);
4141 /* Make an instruction with body X and code CALL_INSN
4142 and output it before the instruction BEFORE. */
4145 emit_call_insn_before_noloc (rtx x, rtx before)
4147 rtx last = NULL_RTX, insn;
4149 gcc_assert (before);
4151 switch (GET_CODE (x))
4163 rtx next = NEXT_INSN (insn);
4164 add_insn_before (insn, before, NULL);
4170 #ifdef ENABLE_RTL_CHECKING
4177 last = make_call_insn_raw (x);
4178 add_insn_before (last, before, NULL);
4185 /* Make an instruction with body X and code DEBUG_INSN
4186 and output it before the instruction BEFORE. */
4189 emit_debug_insn_before_noloc (rtx x, rtx before)
4191 rtx last = NULL_RTX, insn;
4193 gcc_assert (before);
4195 switch (GET_CODE (x))
4207 rtx next = NEXT_INSN (insn);
4208 add_insn_before (insn, before, NULL);
4214 #ifdef ENABLE_RTL_CHECKING
4221 last = make_debug_insn_raw (x);
4222 add_insn_before (last, before, NULL);
4229 /* Make an insn of code BARRIER
4230 and output it before the insn BEFORE. */
4233 emit_barrier_before (rtx before)
4235 rtx insn = rtx_alloc (BARRIER);
4237 INSN_UID (insn) = cur_insn_uid++;
4239 add_insn_before (insn, before, NULL);
4243 /* Emit the label LABEL before the insn BEFORE. */
4246 emit_label_before (rtx label, rtx before)
4248 /* This can be called twice for the same label as a result of the
4249 confusion that follows a syntax error! So make it harmless. */
4250 if (INSN_UID (label) == 0)
4252 INSN_UID (label) = cur_insn_uid++;
4253 add_insn_before (label, before, NULL);
4259 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4262 emit_note_before (enum insn_note subtype, rtx before)
4264 rtx note = rtx_alloc (NOTE);
4265 INSN_UID (note) = cur_insn_uid++;
4266 NOTE_KIND (note) = subtype;
4267 BLOCK_FOR_INSN (note) = NULL;
4268 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4270 add_insn_before (note, before, NULL);
4274 /* Helper for emit_insn_after, handles lists of instructions efficiently. */
4278 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4282 if (!bb && !BARRIER_P (after))
4283 bb = BLOCK_FOR_INSN (after);
4287 df_set_bb_dirty (bb);
4288 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4289 if (!BARRIER_P (last))
4291 set_block_for_insn (last, bb);
4292 df_insn_rescan (last);
4294 if (!BARRIER_P (last))
4296 set_block_for_insn (last, bb);
4297 df_insn_rescan (last);
4299 if (BB_END (bb) == after)
4303 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4306 after_after = NEXT_INSN (after);
4308 NEXT_INSN (after) = first;
4309 PREV_INSN (first) = after;
4310 NEXT_INSN (last) = after_after;
4312 PREV_INSN (after_after) = last;
4314 if (after == get_last_insn())
4315 set_last_insn (last);
4320 /* Make X be output after the insn AFTER and set the BB of insn. If
4321 BB is NULL, an attempt is made to infer the BB from AFTER. */
4324 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4333 switch (GET_CODE (x))
4342 last = emit_insn_after_1 (x, after, bb);
4345 #ifdef ENABLE_RTL_CHECKING
4352 last = make_insn_raw (x);
4353 add_insn_after (last, after, bb);
4361 /* Make an insn of code JUMP_INSN with body X
4362 and output it after the insn AFTER. */
4365 emit_jump_insn_after_noloc (rtx x, rtx after)
4371 switch (GET_CODE (x))
4380 last = emit_insn_after_1 (x, after, NULL);
4383 #ifdef ENABLE_RTL_CHECKING
4390 last = make_jump_insn_raw (x);
4391 add_insn_after (last, after, NULL);
4398 /* Make an instruction with body X and code CALL_INSN
4399 and output it after the instruction AFTER. */
4402 emit_call_insn_after_noloc (rtx x, rtx after)
4408 switch (GET_CODE (x))
4417 last = emit_insn_after_1 (x, after, NULL);
4420 #ifdef ENABLE_RTL_CHECKING
4427 last = make_call_insn_raw (x);
4428 add_insn_after (last, after, NULL);
4435 /* Make an instruction with body X and code DEBUG_INSN
4436 and output it after the instruction AFTER. */
4439 emit_debug_insn_after_noloc (rtx x, rtx after)
4445 switch (GET_CODE (x))
4454 last = emit_insn_after_1 (x, after, NULL);
4457 #ifdef ENABLE_RTL_CHECKING
4464 last = make_debug_insn_raw (x);
4465 add_insn_after (last, after, NULL);
4472 /* Make an insn of code BARRIER
4473 and output it after the insn AFTER. */
4476 emit_barrier_after (rtx after)
4478 rtx insn = rtx_alloc (BARRIER);
4480 INSN_UID (insn) = cur_insn_uid++;
4482 add_insn_after (insn, after, NULL);
4486 /* Emit the label LABEL after the insn AFTER. */
4489 emit_label_after (rtx label, rtx after)
4491 /* This can be called twice for the same label
4492 as a result of the confusion that follows a syntax error!
4493 So make it harmless. */
4494 if (INSN_UID (label) == 0)
4496 INSN_UID (label) = cur_insn_uid++;
4497 add_insn_after (label, after, NULL);
4503 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4506 emit_note_after (enum insn_note subtype, rtx after)
4508 rtx note = rtx_alloc (NOTE);
4509 INSN_UID (note) = cur_insn_uid++;
4510 NOTE_KIND (note) = subtype;
4511 BLOCK_FOR_INSN (note) = NULL;
4512 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4513 add_insn_after (note, after, NULL);
4517 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4519 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4521 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4523 if (pattern == NULL_RTX || !loc)
4526 after = NEXT_INSN (after);
4529 if (active_insn_p (after) && !INSN_LOCATOR (after))
4530 INSN_LOCATOR (after) = loc;
4533 after = NEXT_INSN (after);
4538 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4540 emit_insn_after (rtx pattern, rtx after)
4544 while (DEBUG_INSN_P (prev))
4545 prev = PREV_INSN (prev);
4548 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4550 return emit_insn_after_noloc (pattern, after, NULL);
4553 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4555 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4557 rtx last = emit_jump_insn_after_noloc (pattern, after);
4559 if (pattern == NULL_RTX || !loc)
4562 after = NEXT_INSN (after);
4565 if (active_insn_p (after) && !INSN_LOCATOR (after))
4566 INSN_LOCATOR (after) = loc;
4569 after = NEXT_INSN (after);
4574 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4576 emit_jump_insn_after (rtx pattern, rtx after)
4580 while (DEBUG_INSN_P (prev))
4581 prev = PREV_INSN (prev);
4584 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4586 return emit_jump_insn_after_noloc (pattern, after);
4589 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4591 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4593 rtx last = emit_call_insn_after_noloc (pattern, after);
4595 if (pattern == NULL_RTX || !loc)
4598 after = NEXT_INSN (after);
4601 if (active_insn_p (after) && !INSN_LOCATOR (after))
4602 INSN_LOCATOR (after) = loc;
4605 after = NEXT_INSN (after);
4610 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4612 emit_call_insn_after (rtx pattern, rtx after)
4616 while (DEBUG_INSN_P (prev))
4617 prev = PREV_INSN (prev);
4620 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4622 return emit_call_insn_after_noloc (pattern, after);
4625 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4627 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4629 rtx last = emit_debug_insn_after_noloc (pattern, after);
4631 if (pattern == NULL_RTX || !loc)
4634 after = NEXT_INSN (after);
4637 if (active_insn_p (after) && !INSN_LOCATOR (after))
4638 INSN_LOCATOR (after) = loc;
4641 after = NEXT_INSN (after);
4646 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4648 emit_debug_insn_after (rtx pattern, rtx after)
4651 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4653 return emit_debug_insn_after_noloc (pattern, after);
4656 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4658 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4660 rtx first = PREV_INSN (before);
4661 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4663 if (pattern == NULL_RTX || !loc)
4667 first = get_insns ();
4669 first = NEXT_INSN (first);
4672 if (active_insn_p (first) && !INSN_LOCATOR (first))
4673 INSN_LOCATOR (first) = loc;
4676 first = NEXT_INSN (first);
4681 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4683 emit_insn_before (rtx pattern, rtx before)
4687 while (DEBUG_INSN_P (next))
4688 next = PREV_INSN (next);
4691 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4693 return emit_insn_before_noloc (pattern, before, NULL);
4696 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4698 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4700 rtx first = PREV_INSN (before);
4701 rtx last = emit_jump_insn_before_noloc (pattern, before);
4703 if (pattern == NULL_RTX)
4706 first = NEXT_INSN (first);
4709 if (active_insn_p (first) && !INSN_LOCATOR (first))
4710 INSN_LOCATOR (first) = loc;
4713 first = NEXT_INSN (first);
4718 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4720 emit_jump_insn_before (rtx pattern, rtx before)
4724 while (DEBUG_INSN_P (next))
4725 next = PREV_INSN (next);
4728 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4730 return emit_jump_insn_before_noloc (pattern, before);
4733 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4735 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4737 rtx first = PREV_INSN (before);
4738 rtx last = emit_call_insn_before_noloc (pattern, before);
4740 if (pattern == NULL_RTX)
4743 first = NEXT_INSN (first);
4746 if (active_insn_p (first) && !INSN_LOCATOR (first))
4747 INSN_LOCATOR (first) = loc;
4750 first = NEXT_INSN (first);
4755 /* Like emit_call_insn_before_noloc,
4756 but set INSN_LOCATOR according to BEFORE. */
4758 emit_call_insn_before (rtx pattern, rtx before)
4762 while (DEBUG_INSN_P (next))
4763 next = PREV_INSN (next);
4766 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4768 return emit_call_insn_before_noloc (pattern, before);
4771 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4773 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4775 rtx first = PREV_INSN (before);
4776 rtx last = emit_debug_insn_before_noloc (pattern, before);
4778 if (pattern == NULL_RTX)
4781 first = NEXT_INSN (first);
4784 if (active_insn_p (first) && !INSN_LOCATOR (first))
4785 INSN_LOCATOR (first) = loc;
4788 first = NEXT_INSN (first);
4793 /* Like emit_debug_insn_before_noloc,
4794 but set INSN_LOCATOR according to BEFORE. */
4796 emit_debug_insn_before (rtx pattern, rtx before)
4798 if (INSN_P (before))
4799 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4801 return emit_debug_insn_before_noloc (pattern, before);
4804 /* Take X and emit it at the end of the doubly-linked list.
4807 Returns the last insn emitted. */
4812 rtx last = get_last_insn();
4818 switch (GET_CODE (x))
4830 rtx next = NEXT_INSN (insn);
4837 #ifdef ENABLE_RTL_CHECKING
4844 last = make_insn_raw (x);
4852 /* Make an insn of code DEBUG_INSN with pattern X
4853 and add it to the end of the doubly-linked list. */
4856 emit_debug_insn (rtx x)
4858 rtx last = get_last_insn();
4864 switch (GET_CODE (x))
4876 rtx next = NEXT_INSN (insn);
4883 #ifdef ENABLE_RTL_CHECKING
4890 last = make_debug_insn_raw (x);
4898 /* Make an insn of code JUMP_INSN with pattern X
4899 and add it to the end of the doubly-linked list. */
4902 emit_jump_insn (rtx x)
4904 rtx last = NULL_RTX, insn;
4906 switch (GET_CODE (x))
4918 rtx next = NEXT_INSN (insn);
4925 #ifdef ENABLE_RTL_CHECKING
4932 last = make_jump_insn_raw (x);
4940 /* Make an insn of code CALL_INSN with pattern X
4941 and add it to the end of the doubly-linked list. */
4944 emit_call_insn (rtx x)
4948 switch (GET_CODE (x))
4957 insn = emit_insn (x);
4960 #ifdef ENABLE_RTL_CHECKING
4967 insn = make_call_insn_raw (x);
4975 /* Add the label LABEL to the end of the doubly-linked list. */
4978 emit_label (rtx label)
4980 /* This can be called twice for the same label
4981 as a result of the confusion that follows a syntax error!
4982 So make it harmless. */
4983 if (INSN_UID (label) == 0)
4985 INSN_UID (label) = cur_insn_uid++;
4991 /* Make an insn of code BARRIER
4992 and add it to the end of the doubly-linked list. */
4997 rtx barrier = rtx_alloc (BARRIER);
4998 INSN_UID (barrier) = cur_insn_uid++;
5003 /* Emit a copy of note ORIG. */
5006 emit_note_copy (rtx orig)
5010 note = rtx_alloc (NOTE);
5012 INSN_UID (note) = cur_insn_uid++;
5013 NOTE_DATA (note) = NOTE_DATA (orig);
5014 NOTE_KIND (note) = NOTE_KIND (orig);
5015 BLOCK_FOR_INSN (note) = NULL;
5021 /* Make an insn of code NOTE with kind KIND
5022 and add it to the end of the doubly-linked list. */
5025 emit_note (enum insn_note kind)
5029 note = rtx_alloc (NOTE);
5030 INSN_UID (note) = cur_insn_uid++;
5031 NOTE_KIND (note) = kind;
5032 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5033 BLOCK_FOR_INSN (note) = NULL;
5038 /* Emit a clobber of lvalue X. */
5041 emit_clobber (rtx x)
5043 /* CONCATs should not appear in the insn stream. */
5044 if (GET_CODE (x) == CONCAT)
5046 emit_clobber (XEXP (x, 0));
5047 return emit_clobber (XEXP (x, 1));
5049 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5052 /* Return a sequence of insns to clobber lvalue X. */
5066 /* Emit a use of rvalue X. */
5071 /* CONCATs should not appear in the insn stream. */
5072 if (GET_CODE (x) == CONCAT)
5074 emit_use (XEXP (x, 0));
5075 return emit_use (XEXP (x, 1));
5077 return emit_insn (gen_rtx_USE (VOIDmode, x));
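/* Illustrative sketch (not from the original source): emit_clobber and
   emit_use commonly bracket code that builds up a multi-word hard register
   piecewise, so dataflow treats the register as fully set and then live:

     emit_clobber (hard_reg);
     ... emit stores into the parts of HARD_REG ...
     emit_use (hard_reg);

   HARD_REG stands for a caller-provided register rtx.  */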
5080 /* Return a sequence of insns to use rvalue X. */
5094 /* Cause next statement to emit a line note even if the line number has not changed. */
5098 force_next_line_note (void)
5103 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5104 note of this type already exists, remove it first. */
5107 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5109 rtx note = find_reg_note (insn, kind, NULL_RTX);
5115 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5116 has multiple sets (some callers assume single_set
5117 means the insn only has one set, when in fact it
5118 means the insn only has one *useful* set). */
5119 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5125 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5126 It serves no useful purpose and breaks eliminate_regs. */
5127 if (GET_CODE (datum) == ASM_OPERANDS)
5132 XEXP (note, 0) = datum;
5133 df_notes_rescan (insn);
5141 XEXP (note, 0) = datum;
5147 add_reg_note (insn, kind, datum);
5153 df_notes_rescan (insn);
5159 return REG_NOTES (insn);
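/* Illustrative sketch (not from the original source): a typical use is
   recording the value computed by a multi-insn expansion on its final
   insn, so later passes can substitute the simpler form:

     set_unique_reg_note (last, REG_EQUAL, copy_rtx (value));

   where LAST and VALUE are hypothetical caller-provided rtxes.  */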
5162 /* Return an indication of which type of insn should have X as a body.
5163 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5165 static enum rtx_code
5166 classify_insn (rtx x)
5170 if (GET_CODE (x) == CALL)
5172 if (GET_CODE (x) == RETURN)
5174 if (GET_CODE (x) == SET)
5176 if (SET_DEST (x) == pc_rtx)
5178 else if (GET_CODE (SET_SRC (x)) == CALL)
5183 if (GET_CODE (x) == PARALLEL)
5186 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5187 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5189 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5190 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5192 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5193 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
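/* Usage sketch (illustrative; LABEL is a hypothetical CODE_LABEL):

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_LABEL_REF (VOIDmode, label)));

   classify_insn sees a SET whose destination is the pc and classifies
   the pattern as a JUMP_INSN; since the jump is unconditional, emit
   also places a barrier after it.  */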
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
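/* Usage sketch (illustrative; DEST, SRC and AFTER are hypothetical
   values the caller already has):

     rtx seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_after (seq, after);

   The insns are built in isolation, fetched with get_insns *before*
   end_sequence restores the previous chain, and then spliced wherever
   the caller needs them.  */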
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;

    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
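/* Usage sketch (illustrative; INSN is a hypothetical insn being
   duplicated):

     rtx pat = copy_insn (PATTERN (insn));
     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     rtx eq = note ? copy_insn_1 (XEXP (note, 0)) : NULL_RTX;

   Calling copy_insn_1 on the note body right after copy_insn reuses
   the SCRATCH and ASM_OPERANDS mappings recorded while copying the
   pattern, as described above copy_insn_1.  */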
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
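/* Usage sketch (illustrative; assumes the target provides V4SImode):

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;

   Here gen_rtx_CONST_VECTOR (V4SImode, v) returns the shared
   CONST0_RTX (V4SImode) node instead of allocating a new vector.  */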
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
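/* Usage sketch (illustrative; OP is a hypothetical operand): once
   init_emit_once has filled const_tiny_rtx, CONST0_RTX and CONST1_RTX
   are simple table lookups, so a pointer comparison suffices:

     if (op == CONST0_RTX (GET_MODE (op)))
       return true;
*/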
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
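/* Usage sketch (illustrative; INSN and BB are hypothetical):

     rtx copy = emit_copy_of_insn_after (insn, BB_END (bb));

   The duplicate keeps the original's INSN_CODE, so recog does not have
   to re-recognize it, and the frame-related and call flags survive the
   copy.  */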
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
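/* Usage sketch (illustrative; REGNO is a hypothetical hard register
   number known to the caller):

     emit_insn (gen_hard_reg_clobber (SImode, regno));

   Because the CLOBBER rtx is cached per (mode, regno) pair, repeated
   callers share one node instead of allocating a fresh one each
   time.  */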
#include "gt-emit-rtl.h"