1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
48 #include "hard-reg-set.h"
50 #include "insn-config.h"
55 #include "basic-block.h"
58 #include "langhooks.h"
60 /* Commonly used modes. */
62 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
63 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
64 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
65 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
68 /* This is *not* reset after each function. It gives each CODE_LABEL
69 in the entire compilation a unique label number. */
71 static int label_num = 1;
73 /* Highest label number in current function.
74 Zero means use the value of label_num instead.
75 This is nonzero only when belatedly compiling an inline function. */
77 static int last_label_num;
79 /* Value label_num had when set_new_first_and_last_label_number was called.
80 If label_num has not changed since then, last_label_num is valid. */
82 static int base_label_num;
84 /* Nonzero means do not generate NOTEs for source line numbers. */
86 static int no_line_numbers;
88 /* Commonly used rtx's, so that we only need space for one copy.
89 These are initialized once for the entire compilation.
90 All of these are unique; no other rtx-object will be equal to any
93 rtx global_rtl[GR_MAX];
95 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
96 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
97 record a copy of const[012]_rtx. */
99 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
108 /* All references to the following fixed hard registers go through
109 these unique rtl objects. On machines where the frame-pointer and
110 arg-pointer are the same register, they use the same unique object.
112 After register allocation, other rtl objects which used to be pseudo-regs
113 may be clobbered to refer to the frame-pointer register.
114 But references that were originally to the frame-pointer can be
115 distinguished from the others because they contain frame_pointer_rtx.
117 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
118 tricky: until register elimination has taken place hard_frame_pointer_rtx
119 should be used if it is being set, and frame_pointer_rtx otherwise. After
120 register elimination hard_frame_pointer_rtx should always be used.
121 On machines where the two registers are same (most) then these are the
124 In an inline procedure, the stack and frame pointer rtxs may not be
125 used for anything else. */
126 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
127 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
128 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
129 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
130 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
132 /* This is used to implement __builtin_return_address for some machines.
133 See for instance the MIPS port. */
134 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
136 /* We make one copy of (const_int C) where C is in
137 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
138 to save space during the compilation and simplify comparisons of
141 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
143 /* A hash table storing CONST_INTs whose absolute value is greater
144 than MAX_SAVED_CONST_INT. */
146 static htab_t const_int_htab;
148 /* A hash table storing memory attribute structures. */
149 static htab_t mem_attrs_htab;
151 /* A hash table storing all CONST_DOUBLEs. */
152 static htab_t const_double_htab;
154 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
155 shortly thrown away. We use two mechanisms to prevent this waste:
157 For sizes up to 5 elements, we keep a SEQUENCE and its associated
158 rtvec for use by gen_sequence. One entry for each size is
159 sufficient because most cases are calls to gen_sequence followed by
160 immediately emitting the SEQUENCE. Reuse is safe since emitting a
161 sequence is destructive on the insn in it anyway and hence can't be
164 We do not bother to save this cached data over nested function calls.
165 Instead, we just reinitialize them. */
167 #define SEQUENCE_RESULT_SIZE 5
169 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
171 /* During RTL generation, we also keep a list of free INSN rtl codes. */
172 static rtx free_insn;
174 #define first_insn (cfun->emit->x_first_insn)
175 #define last_insn (cfun->emit->x_last_insn)
176 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
177 #define last_linenum (cfun->emit->x_last_linenum)
178 #define last_filename (cfun->emit->x_last_filename)
179 #define first_label_num (cfun->emit->x_first_label_num)
181 static rtx make_jump_insn_raw PARAMS ((rtx));
182 static rtx make_call_insn_raw PARAMS ((rtx));
183 static rtx find_line_note PARAMS ((rtx));
184 static void mark_sequence_stack PARAMS ((struct sequence_stack *));
185 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
187 static void unshare_all_rtl_1 PARAMS ((rtx));
188 static void unshare_all_decls PARAMS ((tree));
189 static void reset_used_decls PARAMS ((tree));
190 static void mark_label_nuses PARAMS ((rtx));
191 static hashval_t const_int_htab_hash PARAMS ((const void *));
192 static int const_int_htab_eq PARAMS ((const void *,
194 static hashval_t const_double_htab_hash PARAMS ((const void *));
195 static int const_double_htab_eq PARAMS ((const void *,
197 static rtx lookup_const_double PARAMS ((rtx));
198 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
199 static int mem_attrs_htab_eq PARAMS ((const void *,
201 static void mem_attrs_mark PARAMS ((const void *));
202 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
205 static tree component_ref_for_mem_expr PARAMS ((tree));
206 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
208 /* Probability of the conditional branch currently proceeded by try_split.
209 Set to -1 otherwise. */
210 int split_branch_probability = -1;
212 /* Returns a hash code for X (which is a really a CONST_INT). */
/* Hash callback for const_int_htab: the hash of a CONST_INT is simply
   its integer value.
   NOTE(review): interior lines (return type, declarations, braces) appear
   missing from this listing -- verify against the complete source.  */
215 const_int_htab_hash (x)
218 return (hashval_t) INTVAL ((struct rtx_def *) x);
221 /* Returns non-zero if the value represented by X (which is really a
222 CONST_INT) is the same as that given by Y (which is really a
/* Equality callback for const_int_htab: X is an existing CONST_INT rtx,
   Y is a pointer to the candidate HOST_WIDE_INT value.
   NOTE(review): interior lines appear missing from this listing.  */
226 const_int_htab_eq (x, y)
230 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
233 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
/* Hash callback for const_double_htab: XOR together all the wide-int
   words of the CONST_DOUBLE.
   NOTE(review): interior lines appear missing from this listing.  */
235 const_double_htab_hash (x)
242 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
243 h ^= XWINT (value, i);
247 /* Returns non-zero if the value represented by X (really a ...)
248 is the same as that represented by Y (really a ...) */
/* Equality callback for const_double_htab: two CONST_DOUBLEs are equal
   iff their modes match and every wide-int word matches.
   NOTE(review): interior lines appear missing from this listing.  */
250 const_double_htab_eq (x, y)
254 rtx a = (rtx)x, b = (rtx)y;
257 if (GET_MODE (a) != GET_MODE (b))
259 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
260 if (XWINT (a, i) != XWINT (b, i))
266 /* Returns a hash code for X (which is a really a mem_attrs *). */
/* Hash callback for mem_attrs_htab: combine alias set, alignment,
   offset and size with distinct multipliers so differing fields land
   in different buckets.
   NOTE(review): interior lines appear missing from this listing.  */
269 mem_attrs_htab_hash (x)
272 mem_attrs *p = (mem_attrs *) x;
274 return (p->alias ^ (p->align * 1000)
275 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
276 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
280 /* Returns non-zero if the value represented by X (which is really a
281 mem_attrs *) is the same as that given by Y (which is also really a
/* Equality callback for mem_attrs_htab: field-by-field comparison of two
   mem_attrs structures (offset/size compared as shared rtx pointers).
   NOTE(review): interior lines appear missing from this listing.  */
285 mem_attrs_htab_eq (x, y)
289 mem_attrs *p = (mem_attrs *) x;
290 mem_attrs *q = (mem_attrs *) y;
292 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
293 && p->size == q->size && p->align == q->align);
296 /* This routine is called when we determine that we need a mem_attrs entry.
297 It marks the associated decl and RTL as being used, if present. */
/* GC-mark callback for mem_attrs entries: keep the attached tree
   expression and the offset/size rtxs alive across collection.
   NOTE(review): the function header and null-guard lines appear missing
   from this listing.  */
303 mem_attrs *p = (mem_attrs *) x;
306 ggc_mark_tree (p->expr);
309 ggc_mark_rtx (p->offset);
312 ggc_mark_rtx (p->size);
315 /* Allocate a new mem_attrs structure and insert it into the hash table if
316 one identical to it is not already in the table. We are doing this for
/* Return a (possibly shared) mem_attrs for the given alias set, expr,
   offset, size, alignment and mode, interning it in mem_attrs_htab.
   Returns zero when every attribute matches the mode's defaults.
   NOTE(review): several parameter declarations and the early-return /
   assignment lines appear missing from this listing.  */
320 get_mem_attrs (alias, expr, offset, size, align, mode)
326 enum machine_mode mode;
331 /* If everything is the default, we can just return zero. */
332 if (alias == 0 && expr == 0 && offset == 0
334 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
335 && (align == BITS_PER_UNIT
337 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
342 attrs.offset = offset;
346 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
349 *slot = ggc_alloc (sizeof (mem_attrs));
350 memcpy (*slot, &attrs, sizeof (mem_attrs));
356 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
357 don't attempt to share with the various global pieces of rtl (such as
358 frame_pointer_rtx). */
/* Build a fresh REG rtx with ORIGINAL_REGNO recorded, bypassing the
   sharing of the global fixed-register rtxs.
   NOTE(review): the regno parameter declaration, braces and return
   statement appear missing from this listing.  */
361 gen_raw_REG (mode, regno)
362 enum machine_mode mode;
365 rtx x = gen_rtx_raw_REG (mode, regno);
366 ORIGINAL_REGNO (x) = regno;
370 /* There are some RTL codes that require special attention; the generation
371 functions do the raw handling. If you add to this list, modify
372 special_rtx in gengenrtl.c as well. */
/* Return a CONST_INT for ARG, reusing the preallocated const_int_rtx[]
   entries for small values and interning larger ones in const_int_htab
   so pointer equality implies value equality.
   NOTE(review): interior lines (declarations, #endif, final return)
   appear missing from this listing.  */
375 gen_rtx_CONST_INT (mode, arg)
376 enum machine_mode mode ATTRIBUTE_UNUSED;
381 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
382 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
384 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
385 if (const_true_rtx && arg == STORE_FLAG_VALUE)
386 return const_true_rtx;
389 /* Look up the CONST_INT in the hash table. */
390 slot = htab_find_slot_with_hash (const_int_htab, &arg,
391 (hashval_t) arg, INSERT);
393 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
/* Return a CONST_INT for C truncated/sign-extended to fit MODE.
   NOTE(review): the return type and brace lines appear missing from
   this listing.  */
399 gen_int_mode (c, mode)
401 enum machine_mode mode;
403 return GEN_INT (trunc_int_for_mode (c, mode));
406 /* CONST_DOUBLEs might be created from pairs of integers, or from
407 REAL_VALUE_TYPEs. Also, their length is known only at run time,
408 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
410 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
411 hash table. If so, return its counterpart; otherwise add it
412 to the hash table and return it. */
/* Intern REAL (a CONST_DOUBLE) in const_double_htab, returning the
   canonical copy.
   NOTE(review): most of this function's body appears missing from this
   listing.  */
414 lookup_const_double (real)
417 void **slot = htab_find_slot (const_double_htab, real, INSERT);
424 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
425 VALUE in mode MODE. */
/* Build a CONST_DOUBLE holding the REAL_VALUE_TYPE VALUE in MODE, then
   intern it so equal values share one rtx.  The REAL_VALUE_TYPE bits are
   copied raw into the CONST_DOUBLE's word array starting at
   CONST_DOUBLE_LOW.
   NOTE(review): interior lines appear missing from this listing.  */
427 const_double_from_real_value (value, mode)
428 REAL_VALUE_TYPE value;
429 enum machine_mode mode;
431 rtx real = rtx_alloc (CONST_DOUBLE);
432 PUT_MODE (real, mode);
434 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
436 return lookup_const_double (real);
439 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
440 of ints: I0 is the low-order word and I1 is the high-order word.
441 Do not use this routine for non-integer modes; convert to
442 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
/* Return a CONST_INT or CONST_DOUBLE for the double-word integer
   I1:I0 (I0 low word, I1 high word) in MODE.  Values that fit a single
   HOST_WIDE_INT come back as a CONST_INT; otherwise a VOIDmode
   CONST_DOUBLE is interned via lookup_const_double.  Integer modes only.
   NOTE(review): this listing drops interior lines (declarations, some
   branches, #if/#endif); code below kept byte-identical.  */
445 immed_double_const (i0, i1, mode)
446 HOST_WIDE_INT i0, i1;
447 enum machine_mode mode;
452 if (mode != VOIDmode)
455 if (GET_MODE_CLASS (mode) != MODE_INT
456 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
459 /* We clear out all bits that don't belong in MODE, unless they and
460 our sign bit are all one. So we get either a reasonable negative
461 value or a reasonable unsigned value for this mode. */
462 width = GET_MODE_BITSIZE (mode);
463 if (width < HOST_BITS_PER_WIDE_INT
464 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
465 != ((HOST_WIDE_INT) (-1) << (width - 1))))
466 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
467 else if (width == HOST_BITS_PER_WIDE_INT
468 && ! (i1 == ~0 && i0 < 0))
470 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
471 /* We cannot represent this value as a constant. */
474 /* If this would be an entire word for the target, but is not for
475 the host, then sign-extend on the host so that the number will
476 look the same way on the host that it would on the target.
478 For example, when building a 64 bit alpha hosted 32 bit sparc
479 targeted compiler, then we want the 32 bit unsigned value -1 to be
480 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
481 The latter confuses the sparc backend. */
483 if (width < HOST_BITS_PER_WIDE_INT
484 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
485 i0 |= ((HOST_WIDE_INT) (-1) << width);
487 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
490 ??? Strictly speaking, this is wrong if we create a CONST_INT for
491 a large unsigned constant with the size of MODE being
492 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
493 in a wider mode. In that case we will mis-interpret it as a
496 Unfortunately, the only alternative is to make a CONST_DOUBLE for
497 any constant in any mode if it is an unsigned constant larger
498 than the maximum signed integer in an int on the host. However,
499 doing this will break everyone that always expects to see a
500 CONST_INT for SImode and smaller.
502 We have always been making CONST_INTs in this case, so nothing
503 new is being broken. */
505 if (width <= HOST_BITS_PER_WIDE_INT)
506 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
509 /* If this integer fits in one word, return a CONST_INT. */
510 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
513 /* We use VOIDmode for integers. */
514 value = rtx_alloc (CONST_DOUBLE);
515 PUT_MODE (value, VOIDmode);
517 CONST_DOUBLE_LOW (value) = i0;
518 CONST_DOUBLE_HIGH (value) = i1;
520 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
521 XWINT (value, i) = 0;
523 return lookup_const_double (value);
/* Return a REG rtx for hard/pseudo register REGNO in MODE, returning the
   shared global rtx (frame_pointer_rtx etc.) for the well-known fixed
   registers when MODE is Pmode and we are not inside reload.
   NOTE(review): matching #endif lines and the parameter declarations
   appear missing from this listing.  */
527 gen_rtx_REG (mode, regno)
528 enum machine_mode mode;
531 /* In case the MD file explicitly references the frame pointer, have
532 all such references point to the same frame pointer. This is
533 used during frame pointer elimination to distinguish the explicit
534 references to these registers from pseudos that happened to be
537 If we have eliminated the frame pointer or arg pointer, we will
538 be using it as a normal register, for example as a spill
539 register. In such cases, we might be accessing it in a mode that
540 is not Pmode and therefore cannot use the pre-allocated rtx.
542 Also don't do this when we are making new REGs in reload, since
543 we don't want to get confused with the real pointers. */
545 if (mode == Pmode && !reload_in_progress)
547 if (regno == FRAME_POINTER_REGNUM)
548 return frame_pointer_rtx;
549 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
550 if (regno == HARD_FRAME_POINTER_REGNUM)
551 return hard_frame_pointer_rtx;
553 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
554 if (regno == ARG_POINTER_REGNUM)
555 return arg_pointer_rtx;
557 #ifdef RETURN_ADDRESS_POINTER_REGNUM
558 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
559 return return_address_pointer_rtx;
561 if (regno == PIC_OFFSET_TABLE_REGNUM
562 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
563 return pic_offset_table_rtx;
564 if (regno == STACK_POINTER_REGNUM)
565 return stack_pointer_rtx;
568 return gen_raw_REG (mode, regno);
/* Build a MEM rtx for ADDR in MODE; the attribute field is explicitly
   initialized (per the trailing comment) because rtx allocation does not
   clear it.
   NOTE(review): the addr declaration, attribute-clearing statement and
   return appear missing from this listing.  */
572 gen_rtx_MEM (mode, addr)
573 enum machine_mode mode;
576 rtx rt = gen_rtx_raw_MEM (mode, addr);
578 /* This field is not cleared by the mere allocation of the rtx, so
/* Build a SUBREG of REG at byte OFFSET in MODE, aborting early on the
   common invalid cases (misaligned offset, offset past REG's size).
   NOTE(review): the abort calls and some declarations appear missing
   from this listing.  */
586 gen_rtx_SUBREG (mode, reg, offset)
587 enum machine_mode mode;
591 /* This is the most common failure type.
592 Catch it early so we can see who does it. */
593 if ((offset % GET_MODE_SIZE (mode)) != 0)
596 /* This check isn't usable right now because combine will
597 throw arbitrary crap like a CALL into a SUBREG in
598 gen_lowpart_for_combine so we must just eat it. */
600 /* Check for this too. */
601 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
604 return gen_rtx_raw_SUBREG (mode, reg, offset);
607 /* Generate a SUBREG representing the least-significant part of REG if MODE
608 is smaller than mode of REG, otherwise paradoxical SUBREG. */
/* Build the SUBREG selecting the least-significant MODE-sized part of
   REG (paradoxical when MODE is wider), using subreg_lowpart_offset for
   the endian-correct byte offset.
   NOTE(review): the VOIDmode abort path appears missing from this
   listing.  */
611 gen_lowpart_SUBREG (mode, reg)
612 enum machine_mode mode;
615 enum machine_mode inmode;
617 inmode = GET_MODE (reg);
618 if (inmode == VOIDmode)
620 return gen_rtx_SUBREG (mode, reg,
621 subreg_lowpart_offset (mode, inmode));
624 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
626 ** This routine generates an RTX of the size specified by
627 ** <code>, which is an RTX code. The RTX structure is initialized
628 ** from the arguments <element1> through <elementn>, which are
629 ** interpreted according to the specific RTX type's format. The
630 ** special machine mode associated with the rtx (if any) is specified
633 ** gen_rtx can be invoked in a way which resembles the lisp-like
634 ** rtx it will generate. For example, the following rtx structure:
636 ** (plus:QI (mem:QI (reg:SI 1))
637 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
639 ** ...would be generated by the following C code:
641 ** gen_rtx (PLUS, QImode,
642 ** gen_rtx (MEM, QImode,
643 ** gen_rtx (REG, SImode, 1)),
644 ** gen_rtx (MEM, QImode,
645 ** gen_rtx (PLUS, SImode,
646 ** gen_rtx (REG, SImode, 2),
647 ** gen_rtx (REG, SImode, 3)))),
/* Varargs constructor for an arbitrary rtx of CODE/MODE: special-cases
   CONST_INT, CONST_DOUBLE, REG and MEM, otherwise allocates and fills
   each operand according to the code's format string.
   NOTE(review): the switch/case scaffolding, va_end and return appear
   missing from this listing.  */
652 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
654 int i; /* Array indices... */
655 const char *fmt; /* Current rtx's format... */
656 rtx rt_val; /* RTX to return to caller... */
659 VA_FIXEDARG (p, enum rtx_code, code);
660 VA_FIXEDARG (p, enum machine_mode, mode);
665 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
670 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
671 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
673 rt_val = immed_double_const (arg0, arg1, mode);
678 rt_val = gen_rtx_REG (mode, va_arg (p, int));
682 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
686 rt_val = rtx_alloc (code); /* Allocate the storage space. */
687 rt_val->mode = mode; /* Store the machine mode... */
689 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
690 for (i = 0; i < GET_RTX_LENGTH (code); i++)
694 case '0': /* Unused field. */
697 case 'i': /* An integer? */
698 XINT (rt_val, i) = va_arg (p, int);
701 case 'w': /* A wide integer? */
702 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
705 case 's': /* A string? */
706 XSTR (rt_val, i) = va_arg (p, char *);
709 case 'e': /* An expression? */
710 case 'u': /* An insn? Same except when printing. */
711 XEXP (rt_val, i) = va_arg (p, rtx);
714 case 'E': /* An RTX vector? */
715 XVEC (rt_val, i) = va_arg (p, rtvec);
718 case 'b': /* A bitmap? */
719 XBITMAP (rt_val, i) = va_arg (p, bitmap);
722 case 't': /* A tree? */
723 XTREE (rt_val, i) = va_arg (p, tree);
737 /* gen_rtvec (n, [rt1, ..., rtn])
739 ** This routine creates an rtvec and stores within it the
740 ** pointers to rtx's which are its arguments.
/* Varargs constructor for an rtvec of N rtx arguments: copy the args
   into a stack array, then delegate to gen_rtvec_v.
   NOTE(review): declarations, va_end and the save_n workaround line
   appear missing from this listing.  */
745 gen_rtvec VPARAMS ((int n, ...))
751 VA_FIXEDARG (p, int, n);
754 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
756 vector = (rtx *) alloca (n * sizeof (rtx));
758 for (i = 0; i < n; i++)
759 vector[i] = va_arg (p, rtx);
761 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
765 return gen_rtvec_v (save_n, vector);
/* Build an rtvec of N elements copied from the array ARGP; returns
   NULL_RTVEC for N == 0.
   NOTE(review): parameter declarations, braces and final return appear
   missing from this listing.  */
769 gen_rtvec_v (n, argp)
777 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
779 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
781 for (i = 0; i < n; i++)
782 rt_val->elem[i] = *argp++;
787 /* Generate a REG rtx for a new pseudo register of mode MODE.
788 This pseudo is assigned the next sequential register number. */
/* Allocate the next pseudo register of MODE.  Complex modes become a
   CONCAT of two part-mode pseudos; the per-function regno tables are
   doubled when full.
   NOTE(review): the function name line, several declarations, abort
   path and final return appear missing from this listing.  */
792 enum machine_mode mode;
794 struct function *f = cfun;
797 /* Don't let anything called after initial flow analysis create new
802 if (generating_concat_p
803 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
804 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
806 /* For complex modes, don't make a single pseudo.
807 Instead, make a CONCAT of two pseudos.
808 This allows noncontiguous allocation of the real and imaginary parts,
809 which makes much better code. Besides, allocating DCmode
810 pseudos overstrains reload on some machines like the 386. */
811 rtx realpart, imagpart;
812 int size = GET_MODE_UNIT_SIZE (mode);
813 enum machine_mode partmode
814 = mode_for_size (size * BITS_PER_UNIT,
815 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
816 ? MODE_FLOAT : MODE_INT),
819 realpart = gen_reg_rtx (partmode);
820 imagpart = gen_reg_rtx (partmode);
821 return gen_rtx_CONCAT (mode, realpart, imagpart);
824 /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
825 enough to have an element for this pseudo reg number. */
827 if (reg_rtx_no == f->emit->regno_pointer_align_length)
829 int old_size = f->emit->regno_pointer_align_length;
834 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
835 memset (new + old_size, 0, old_size);
836 f->emit->regno_pointer_align = (unsigned char *) new;
838 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
839 old_size * 2 * sizeof (rtx));
840 memset (new1 + old_size, 0, old_size * sizeof (rtx));
841 regno_reg_rtx = new1;
843 new2 = (tree *) xrealloc (f->emit->regno_decl,
844 old_size * 2 * sizeof (tree));
845 memset (new2 + old_size, 0, old_size * sizeof (tree));
846 f->emit->regno_decl = new2;
848 f->emit->regno_pointer_align_length = old_size * 2;
851 val = gen_raw_REG (mode, reg_rtx_no);
852 regno_reg_rtx[reg_rtx_no++] = val;
856 /* Identify REG (which may be a CONCAT) as a user register. */
/* Mark REG (or both halves of a CONCAT) as a user-declared register by
   setting REG_USERVAR_P.
   NOTE(review): the function header and the abort fallback appear
   missing from this listing.  */
862 if (GET_CODE (reg) == CONCAT)
864 REG_USERVAR_P (XEXP (reg, 0)) = 1;
865 REG_USERVAR_P (XEXP (reg, 1)) = 1;
867 else if (GET_CODE (reg) == REG)
868 REG_USERVAR_P (reg) = 1;
873 /* Identify REG as a probable pointer register and show its alignment
874 as ALIGN, if nonzero. */
/* Mark REG as holding a pointer and record its alignment; a second call
   with a smaller alignment lowers the recorded value.
   NOTE(review): parameter declarations and braces appear missing from
   this listing.  */
877 mark_reg_pointer (reg, align)
881 if (! REG_POINTER (reg))
883 REG_POINTER (reg) = 1;
886 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
888 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
889 /* We can no-longer be sure just how aligned this pointer is */
890 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
893 /* Return 1 plus largest pseudo reg number used in the current function. */
901 /* Return 1 + the largest label number used so far in the current function. */
/* Return 1 + the largest label number used so far: the saved
   last_label_num if still valid, otherwise (per the preceding comment)
   the current label_num.
   NOTE(review): function header and fallback return appear missing from
   this listing.  */
906 if (last_label_num && label_num == base_label_num)
907 return last_label_num;
911 /* Return first label number used in this function (if any were used). */
/* Return the first label number used in the current function (from the
   per-function emit status).
   NOTE(review): braces appear missing from this listing.  */
914 get_first_label_num ()
916 return first_label_num;
919 /* Return the final regno of X, which is a SUBREG of a hard
/* Return the hard register number selected by SUBREG X, validating that
   X really is a SUBREG of a hard REG with a congruent byte offset; when
   CHECK_MODE, also verify the inner reg is valid in its mode.
   NOTE(review): abort calls and the final return appear missing from
   this listing.  */
922 subreg_hard_regno (x, check_mode)
926 enum machine_mode mode = GET_MODE (x);
927 unsigned int byte_offset, base_regno, final_regno;
928 rtx reg = SUBREG_REG (x);
930 /* This is where we attempt to catch illegal subregs
931 created by the compiler. */
932 if (GET_CODE (x) != SUBREG
933 || GET_CODE (reg) != REG)
935 base_regno = REGNO (reg);
936 if (base_regno >= FIRST_PSEUDO_REGISTER)
938 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
941 /* Catch non-congruent offsets too. */
942 byte_offset = SUBREG_BYTE (x);
943 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
946 final_regno = subreg_regno (x);
951 /* Return a value representing some low-order bits of X, where the number
952 of low-order bits is given by MODE. Note that no conversion is done
953 between floating-point and fixed-point values, rather, the bit
954 representation is returned.
956 This function handles the cases in common between gen_lowpart, below,
957 and two variants in cse.c and combine.c. These are the cases that can
958 be safely handled at all points in the compilation.
960 If this is not a case we can handle, return 0. */
/* Core of gen_lowpart: return the low-order MODE-sized part of X when
   that can be done safely at any point in compilation (extensions,
   REG/SUBREG/CONCAT via simplify_gen_subreg, integer and FP constants),
   or 0 when it cannot.
   NOTE(review): this listing drops many interior lines (returns, braces,
   #else/#endif, abort paths); code below kept byte-identical.  */
963 gen_lowpart_common (mode, x)
964 enum machine_mode mode;
967 int msize = GET_MODE_SIZE (mode);
968 int xsize = GET_MODE_SIZE (GET_MODE (x));
971 if (GET_MODE (x) == mode)
974 /* MODE must occupy no more words than the mode of X. */
975 if (GET_MODE (x) != VOIDmode
976 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
977 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
980 offset = subreg_lowpart_offset (mode, GET_MODE (x));
982 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
983 && (GET_MODE_CLASS (mode) == MODE_INT
984 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
986 /* If we are getting the low-order part of something that has been
987 sign- or zero-extended, we can either just use the object being
988 extended or make a narrower extension. If we want an even smaller
989 piece than the size of the object being extended, call ourselves
992 This case is used mostly by combine and cse. */
994 if (GET_MODE (XEXP (x, 0)) == mode)
996 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
997 return gen_lowpart_common (mode, XEXP (x, 0));
998 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
999 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1001 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1002 || GET_CODE (x) == CONCAT)
1003 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
1004 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1005 from the low-order part of the constant. */
1006 else if ((GET_MODE_CLASS (mode) == MODE_INT
1007 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1008 && GET_MODE (x) == VOIDmode
1009 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1011 /* If MODE is twice the host word size, X is already the desired
1012 representation. Otherwise, if MODE is wider than a word, we can't
1013 do this. If MODE is exactly a word, return just one CONST_INT. */
1015 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1017 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1019 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1020 return (GET_CODE (x) == CONST_INT ? x
1021 : GEN_INT (CONST_DOUBLE_LOW (x)));
1024 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1025 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1026 : CONST_DOUBLE_LOW (x));
1028 /* Sign extend to HOST_WIDE_INT. */
1029 val = trunc_int_for_mode (val, mode);
1031 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1036 /* The floating-point emulator can handle all conversions between
1037 FP and integer operands. This simplifies reload because it
1038 doesn't have to deal with constructs like (subreg:DI
1039 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1040 /* Single-precision floats are always 32-bits and double-precision
1041 floats are always 64-bits. */
1043 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1044 && GET_MODE_BITSIZE (mode) == 32
1045 && GET_CODE (x) == CONST_INT)
1051 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
1052 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1054 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1055 && GET_MODE_BITSIZE (mode) == 64
1056 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1057 && GET_MODE (x) == VOIDmode
1061 HOST_WIDE_INT low, high;
1063 if (GET_CODE (x) == CONST_INT)
1066 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1070 low = CONST_DOUBLE_LOW (x);
1071 high = CONST_DOUBLE_HIGH (x);
1074 #if HOST_BITS_PER_WIDE_INT == 32
1075 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1077 if (WORDS_BIG_ENDIAN)
1078 i[0] = high, i[1] = low;
1080 i[0] = low, i[1] = high;
1085 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
1086 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1088 else if ((GET_MODE_CLASS (mode) == MODE_INT
1089 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1090 && GET_CODE (x) == CONST_DOUBLE
1091 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1094 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1095 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1097 /* Convert 'r' into an array of four 32-bit words in target word
1099 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1100 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1103 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1106 i[3 - 3 * endian] = 0;
1109 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1110 i[2 - 2 * endian] = 0;
1111 i[3 - 2 * endian] = 0;
1114 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1115 i[3 - 3 * endian] = 0;
1118 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1123 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1125 #if HOST_BITS_PER_WIDE_INT == 32
1126 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1128 if (HOST_BITS_PER_WIDE_INT != 64)
1131 return immed_double_const ((((unsigned long) i[3 * endian])
1132 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1133 (((unsigned long) i[2 - endian])
1134 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1139 /* Otherwise, we can't do this. */
1143 /* Return the real part (which has mode MODE) of a complex value X.
1144 This always comes at the low address in memory. */
/* Return the real part (mode MODE) of complex value X -- the low-address
   half in memory, hence the high part on WORDS_BIG_ENDIAN targets.
   Aborts for sub-word hard regs on big-endian hosts.
   NOTE(review): the internal_error call line and declarations appear
   partially missing from this listing.  */
1147 gen_realpart (mode, x)
1148 enum machine_mode mode;
1151 if (WORDS_BIG_ENDIAN
1152 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1154 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1156 ("can't access real part of complex value in hard register");
1157 else if (WORDS_BIG_ENDIAN)
1158 return gen_highpart (mode, x);
1160 return gen_lowpart (mode, x);
1163 /* Return the imaginary part (which has mode MODE) of a complex value X.
1164 This always comes at the high address in memory. */
/* Return the imaginary part (mode MODE) of complex value X -- the
   high-address half in memory; mirror image of gen_realpart.
   NOTE(review): interior lines appear missing from this listing.  */
1167 gen_imagpart (mode, x)
1168 enum machine_mode mode;
1171 if (WORDS_BIG_ENDIAN)
1172 return gen_lowpart (mode, x);
1173 else if (! WORDS_BIG_ENDIAN
1174 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1176 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1178 ("can't access imaginary part of complex value in hard register");
1180 return gen_highpart (mode, x);
1183 /* Return 1 iff X, assumed to be a SUBREG,
1184 refers to the real part of the complex value in its containing reg.
1185 Complex values are always stored with the real part in the first word,
1186 regardless of WORDS_BIG_ENDIAN. */
/* Return nonzero iff SUBREG X refers to the real (first-word) part of a
   complex value, i.e. its byte offset is below one unit size.
   NOTE(review): the abort for non-SUBREG appears truncated in this
   listing.  */
1189 subreg_realpart_p (x)
1192 if (GET_CODE (x) != SUBREG)
1195 return ((unsigned int) SUBREG_BYTE (x)
1196 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1199 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1200 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1201 least-significant part of X.
1202 MODE specifies how big a part of X to return;
1203 it usually should not be larger than a word.
1204 If X is a MEM whose address is a QUEUED, the value may be so also. */
/* Public entry for taking the low MODE-sized part of X: try
   gen_lowpart_common, then the hard-reg copy, MEM address adjustment and
   ADDRESSOF fallbacks.
   NOTE(review): several returns, declarations and the final abort
   appear missing from this listing.  */
1207 gen_lowpart (mode, x)
1208 enum machine_mode mode;
1211 rtx result = gen_lowpart_common (mode, x);
1215 else if (GET_CODE (x) == REG)
1217 /* Must be a hard reg that's not valid in MODE. */
1218 result = gen_lowpart_common (mode, copy_to_reg (x));
1223 else if (GET_CODE (x) == MEM)
1225 /* The only additional case we can do is MEM. */
1227 if (WORDS_BIG_ENDIAN)
1228 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1229 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1231 if (BYTES_BIG_ENDIAN)
1232 /* Adjust the address so that the address-after-the-data
1234 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1235 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1237 return adjust_address (x, mode, offset);
1239 else if (GET_CODE (x) == ADDRESSOF)
1240 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1245 /* Like `gen_lowpart', but refer to the most significant part.
1246 This is used to access the imaginary part of a complex number. */
1249 gen_highpart (mode, x)
1250 enum machine_mode mode;
1253 unsigned int msize = GET_MODE_SIZE (mode);
1256 /* This case loses if X is a subreg. To catch bugs early,
1257 complain if an invalid MODE is used even in other cases. */
1258 if (msize > UNITS_PER_WORD
1259 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
     /* Delegate offset computation to subreg_highpart_offset so the
	endianness rules live in one place.  */
1262 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1263 subreg_highpart_offset (mode, GET_MODE (x)));
1265 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1266 the target if we have a MEM. gen_highpart must return a valid operand,
1267 emitting code if necessary to do so. */
1268 if (result != NULL_RTX && GET_CODE (result) == MEM)
1269 result = validize_mem (result);
1276 /* Like gen_highpart_mode, but accept mode of EXP operand in case EXP can
1277 be VOIDmode constant. */
1279 gen_highpart_mode (outermode, innermode, exp)
1280 enum machine_mode outermode, innermode;
     /* If EXP carries its own mode, it must agree with INNERMODE and the
	ordinary gen_highpart path applies.  */
1283 if (GET_MODE (exp) != VOIDmode)
1285 if (GET_MODE (exp) != innermode)
1287 return gen_highpart (outermode, exp);
     /* VOIDmode constant: build the subreg directly from INNERMODE.  */
1289 return simplify_gen_subreg (outermode, exp, innermode,
1290 subreg_highpart_offset (outermode, innermode));
1293 /* Return offset in bytes to get OUTERMODE low part
1294 of the value in mode INNERMODE stored in memory in target format. */
1297 subreg_lowpart_offset (outermode, innermode)
1298 enum machine_mode outermode, innermode;
1300 unsigned int offset = 0;
1301 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
     /* Word and byte order contribute independently: whole words for
	WORDS_BIG_ENDIAN, the residual bytes for BYTES_BIG_ENDIAN.  */
1305 if (WORDS_BIG_ENDIAN)
1306 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1307 if (BYTES_BIG_ENDIAN)
1308 offset += difference % UNITS_PER_WORD;
1314 /* Return offset in bytes to get OUTERMODE high part
1315 of the value in mode INNERMODE stored in memory in target format. */
1317 subreg_highpart_offset (outermode, innermode)
1318 enum machine_mode outermode, innermode;
1320 unsigned int offset = 0;
1321 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
     /* The outer mode must fit inside the inner one.  */
1323 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
     /* Mirror image of subreg_lowpart_offset: little-endian orderings
	put the high part at the larger byte offsets.  */
1328 if (! WORDS_BIG_ENDIAN)
1329 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1330 if (! BYTES_BIG_ENDIAN)
1331 offset += difference % UNITS_PER_WORD;
1337 /* Return 1 iff X, assumed to be a SUBREG,
1338 refers to the least significant part of its containing reg.
1339 If X is not a SUBREG, always return 1 (it is its own low part!). */
1342 subreg_lowpart_p (x)
1345 if (GET_CODE (x) != SUBREG)
1347 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
     /* A SUBREG is the low part exactly when its byte offset equals the
	canonical lowpart offset for this mode pair.  */
1350 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1351 == SUBREG_BYTE (x));
1355 /* Helper routine for all the constant cases of operand_subword.
1356 Some places invoke this directly. */
1359 constant_subword (op, offset, mode)
1362 enum machine_mode mode;
     /* Number of target words held in one HOST_WIDE_INT.  */
1364 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1367 /* If OP is already an integer word, return it. */
1368 if (GET_MODE_CLASS (mode) == MODE_INT
1369 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1372 /* The output is some bits, the width of the target machine's word.
1373 A wider-word host can surely hold them in a CONST_INT. A narrower-word
     Case 1: 64-bit float constant.  */
1375 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1376 && GET_MODE_CLASS (mode) == MODE_FLOAT
1377 && GET_MODE_BITSIZE (mode) == 64
1378 && GET_CODE (op) == CONST_DOUBLE)
1383 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1384 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1386 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1387 which the words are written depends on the word endianness.
1388 ??? This is a potential portability problem and should
1389 be fixed at some point.
1391 We must exercise caution with the sign bit. By definition there
1392 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1393 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1394 So we explicitly mask and sign-extend as necessary. */
1395 if (BITS_PER_WORD == 32)
1398 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1399 return GEN_INT (val);
1401 #if HOST_BITS_PER_WIDE_INT >= 64
1402 else if (BITS_PER_WORD >= 64 && offset == 0)
     /* Glue the two 32-bit halves together in target word order;
	the XOR/subtract trick sign-extends the upper half.  */
1404 val = k[! WORDS_BIG_ENDIAN];
1405 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1406 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1407 return GEN_INT (val);
1410 else if (BITS_PER_WORD == 16)
1412 val = k[offset >> 1];
1413 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1415 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1416 return GEN_INT (val);
     /* Case 2: extended (> 64-bit) float constant.  */
1421 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1422 && GET_MODE_CLASS (mode) == MODE_FLOAT
1423 && GET_MODE_BITSIZE (mode) > 64
1424 && GET_CODE (op) == CONST_DOUBLE)
1429 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1430 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1432 if (BITS_PER_WORD == 32)
1435 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1436 return GEN_INT (val);
1438 #if HOST_BITS_PER_WIDE_INT >= 64
1439 else if (BITS_PER_WORD >= 64 && offset <= 1)
1441 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1442 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1443 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1444 return GEN_INT (val);
1451 /* Single word float is a little harder, since single- and double-word
1452 values often do not have the same high-order bits. We have already
1453 verified that we want the only defined word of the single-word value. */
1454 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1455 && GET_MODE_BITSIZE (mode) == 32
1456 && GET_CODE (op) == CONST_DOUBLE)
1461 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1462 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1464 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1466 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1468 if (BITS_PER_WORD == 16)
1470 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1472 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1475 return GEN_INT (val);
1478 /* The only remaining cases that we can handle are integers.
1479 Convert to proper endianness now since these cases need it.
1480 At this point, offset == 0 means the low-order word.
1482 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1483 in general. However, if OP is (const_int 0), we can just return
1486 if (op == const0_rtx)
1489 if (GET_MODE_CLASS (mode) != MODE_INT
1490 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1491 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
     /* Canonicalize OFFSET so 0 means the low-order word.  */
1494 if (WORDS_BIG_ENDIAN)
1495 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1497 /* Find out which word on the host machine this value is in and get
1498 it from the constant. */
1499 val = (offset / size_ratio == 0
1500 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1501 : (GET_CODE (op) == CONST_INT
1502 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1504 /* Get the value we want into the low bits of val. */
1505 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1506 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
     /* Canonicalize for word_mode before wrapping in a CONST_INT.  */
1508 val = trunc_int_for_mode (val, word_mode);
1510 return GEN_INT (val);
1513 /* Return subword OFFSET of operand OP.
1514 The word number, OFFSET, is interpreted as the word number starting
1515 at the low-order address. OFFSET 0 is the low-order word if not
1516 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1518 If we cannot extract the required word, we return zero. Otherwise,
1519 an rtx corresponding to the requested word will be returned.
1521 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1522 reload has completed, a valid address will always be returned. After
1523 reload, if a valid address cannot be returned, we return zero.
1525 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1526 it is the responsibility of the caller.
1528 MODE is the mode of OP in case it is a CONST_INT.
1530 ??? This is still rather broken for some cases. The problem for the
1531 moment is that all callers of this thing provide no 'goal mode' to
1532 tell us to work with. This exists because all callers were written
1533 in a word based SUBREG world.
1534 Now use of this function can be deprecated by simplify_subreg in most
1539 operand_subword (op, offset, validate_address, mode)
1541 unsigned int offset;
1542 int validate_address;
1543 enum machine_mode mode;
     /* Fall back to OP's own mode when the caller passed VOIDmode.  */
1545 if (mode == VOIDmode)
1546 mode = GET_MODE (op);
1548 if (mode == VOIDmode)
1551 /* If OP is narrower than a word, fail. */
1553 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1556 /* If we want a word outside OP, return zero. */
1558 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1561 /* Form a new MEM at the requested address. */
1562 if (GET_CODE (op) == MEM)
1564 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1566 if (! validate_address)
1569 else if (reload_completed)
     /* After reload we must not emit address-fixup code; fail instead.  */
1571 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1575 return replace_equiv_address (new, XEXP (new, 0));
1578 /* Rest can be handled by simplify_subreg. */
1579 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1582 /* Similar to `operand_subword', but never return 0. If we can't extract
1583 the required subword, put OP into a register and try again. If that fails,
1584 abort. We always validate the address in this case.
1586 MODE is the mode of OP, in case it is CONST_INT. */
1589 operand_subword_force (op, offset, mode)
1591 unsigned int offset;
1592 enum machine_mode mode;
     /* First attempt, with address validation enabled.  */
1594 rtx result = operand_subword (op, offset, 1, mode);
1599 if (mode != BLKmode && mode != VOIDmode)
1601 /* If this is a register which can not be accessed by words, copy it
1602 to a pseudo register. */
1603 if (GET_CODE (op) == REG)
1604 op = copy_to_reg (op);
1606 op = force_reg (mode, op);
     /* Retry on the register copy.  */
1609 result = operand_subword (op, offset, 1, mode);
1616 /* Given a compare instruction, swap the operands.
1617 A test instruction is changed into a compare of 0 against the operand. */
1620 reverse_comparison (insn)
1623 rtx body = PATTERN (insn);
     /* The comparison may be the SET_SRC of a plain SET or of the first
	element of a PARALLEL.  */
1626 if (GET_CODE (body) == SET)
1627 comp = SET_SRC (body);
1629 comp = SET_SRC (XVECEXP (body, 0, 0));
1631 if (GET_CODE (comp) == COMPARE)
1633 rtx op0 = XEXP (comp, 0);
1634 rtx op1 = XEXP (comp, 1);
1635 XEXP (comp, 0) = op1;
1636 XEXP (comp, 1) = op0;
     /* Test insn: wrap the operand in (compare 0 op).  */
1640 rtx new = gen_rtx_COMPARE (VOIDmode,
1641 CONST0_RTX (GET_MODE (comp)), comp);
1642 if (GET_CODE (body) == SET)
1643 SET_SRC (body) = new;
1645 SET_SRC (XVECEXP (body, 0, 0)) = new;
1649 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1650 or (2) a component ref of something variable. Represent the later with
1651 a NULL expression. */
1654 component_ref_for_mem_expr (ref)
1657 tree inner = TREE_OPERAND (ref, 0);
     /* Recurse so nested COMPONENT_REFs are normalized bottom-up.  */
1659 if (TREE_CODE (inner) == COMPONENT_REF)
1660 inner = component_ref_for_mem_expr (inner);
1663 tree placeholder_ptr = 0;
1665 /* Now remove any conversions: they don't change what the underlying
1666 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1667 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1668 || TREE_CODE (inner) == NON_LVALUE_EXPR
1669 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1670 || TREE_CODE (inner) == SAVE_EXPR
1671 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1672 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1673 inner = find_placeholder (inner, &placeholder_ptr);
1675 inner = TREE_OPERAND (inner, 0);
     /* A variable base is represented by a NULL inner expression.  */
1677 if (! DECL_P (inner))
1681 if (inner == TREE_OPERAND (ref, 0))
     /* Rebuild the COMPONENT_REF over the normalized base.  */
1684 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1685 TREE_OPERAND (ref, 1));
1688 /* Given REF, a MEM, and T, either the type of X or the expression
1689 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1690 if we are making a new object of this type. */
1693 set_mem_attributes (ref, t, objectp)
     /* Start from REF's current attributes and refine them below.  */
1698 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1699 tree expr = MEM_EXPR (ref);
1700 rtx offset = MEM_OFFSET (ref);
1701 rtx size = MEM_SIZE (ref);
1702 unsigned int align = MEM_ALIGN (ref);
1705 /* It can happen that type_for_mode was given a mode for which there
1706 is no language-level type. In which case it returns NULL, which
1711 type = TYPE_P (t) ? t : TREE_TYPE (t);
1713 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1714 wrong answer, as it assumes that DECL_RTL already has the right alias
1715 info. Callers should not set DECL_RTL until after the call to
1716 set_mem_attributes. */
1717 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1720 /* Get the alias set from the expression or type (perhaps using a
1721 front-end routine) and use it. */
1722 alias = get_alias_set (t);
1724 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1725 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1726 RTX_UNCHANGING_P (ref)
1727 |= ((lang_hooks.honor_readonly
1728 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1729 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1731 /* If we are making an object of this type, or if this is a DECL, we know
1732 that it is a scalar if the type is not an aggregate. */
1733 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1734 MEM_SCALAR_P (ref) = 1;
1736 /* We can set the alignment from the type if we are making an object,
1737 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1738 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1739 align = MAX (align, TYPE_ALIGN (type));
1741 /* If the size is known, we can set that. */
1742 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1743 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1745 /* If T is not a type, we may be able to deduce some more information about
1749 maybe_set_unchanging (ref, t);
1750 if (TREE_THIS_VOLATILE (t))
1751 MEM_VOLATILE_P (ref) = 1;
1753 /* Now remove any conversions: they don't change what the underlying
1754 object is. Likewise for SAVE_EXPR. */
1755 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1756 || TREE_CODE (t) == NON_LVALUE_EXPR
1757 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1758 || TREE_CODE (t) == SAVE_EXPR)
1759 t = TREE_OPERAND (t, 0);
1761 /* If this expression can't be addressed (e.g., it contains a reference
1762 to a non-addressable field), show we don't change its alias set. */
1763 if (! can_address_p (t))
1764 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1766 /* If this is a decl, set the attributes of the MEM from it. */
1770 offset = const0_rtx;
1771 size = (DECL_SIZE_UNIT (t)
1772 && host_integerp (DECL_SIZE_UNIT (t), 1)
1773 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1774 align = DECL_ALIGN (t);
1777 /* If this is a constant, we know the alignment. */
1778 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1780 align = TYPE_ALIGN (type);
1781 #ifdef CONSTANT_ALIGNMENT
     /* Let the target bump alignment for constants (e.g. string literals).  */
1782 align = CONSTANT_ALIGNMENT (t, align);
1786 /* If this is a field reference and not a bit-field, record it. */
1787 /* ??? There is some information that can be gleened from bit-fields,
1788 such as the word offset in the structure that might be modified.
1789 But skip it for now. */
1790 else if (TREE_CODE (t) == COMPONENT_REF
1791 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1793 expr = component_ref_for_mem_expr (t);
1794 offset = const0_rtx;
1795 /* ??? Any reason the field size would be different than
1796 the size we got from the type? */
1799 /* If this is an array reference, look for an outer field reference. */
1800 else if (TREE_CODE (t) == ARRAY_REF)
     /* Accumulate the constant byte offset of the element chain.  */
1802 tree off_tree = size_zero_node;
1807 = fold (build (PLUS_EXPR, sizetype,
1808 fold (build (MULT_EXPR, sizetype,
1809 TREE_OPERAND (t, 1),
1810 TYPE_SIZE_UNIT (TREE_TYPE (t)))),
1812 t = TREE_OPERAND (t, 0);
1814 while (TREE_CODE (t) == ARRAY_REF);
1816 if (TREE_CODE (t) == COMPONENT_REF)
1818 expr = component_ref_for_mem_expr (t);
1819 if (host_integerp (off_tree, 1))
1820 offset = GEN_INT (tree_low_cst (off_tree, 1));
1821 /* ??? Any reason the field size would be different than
1822 the size we got from the type? */
1827 /* Now set the attributes we computed above. */
1829 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1831 /* If this is already known to be a scalar or aggregate, we are done. */
1832 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1835 /* If it is a reference into an aggregate, this is part of an aggregate.
1836 Otherwise we don't know. */
1837 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1838 || TREE_CODE (t) == ARRAY_RANGE_REF
1839 || TREE_CODE (t) == BIT_FIELD_REF)
1840 MEM_IN_STRUCT_P (ref) = 1;
1843 /* Set the alias set of MEM to SET. */
1846 set_mem_alias_set (mem, set)
1850 #ifdef ENABLE_CHECKING
1851 /* If the new and old alias sets don't conflict, something is wrong. */
1852 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
     /* Attributes are shared/hashed, so rebuild rather than mutate.  */
1856 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1857 MEM_SIZE (mem), MEM_ALIGN (mem),
1861 /* Set the alignment of MEM to ALIGN bits. */
1864 set_mem_align (mem, align)
     /* Rebuild the shared attribute record with only ALIGN replaced.  */
1868 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1869 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1873 /* Set the expr for MEM to EXPR. */
1876 set_mem_expr (mem, expr)
     /* Rebuild the shared attribute record with only EXPR replaced.  */
1881 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1882 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1885 /* Set the offset of MEM to OFFSET. */
1888 set_mem_offset (mem, offset)
     /* Rebuild the shared attribute record with only OFFSET replaced.  */
1891 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1892 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1896 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1897 and its address changed to ADDR. (VOIDmode means don't change the mode.
1898 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1899 returned memory location is required to be valid. The memory
1900 attributes are not changed. */
1903 change_address_1 (memref, mode, addr, validate)
1905 enum machine_mode mode;
1911 if (GET_CODE (memref) != MEM)
1913 if (mode == VOIDmode)
1914 mode = GET_MODE (memref);
1916 addr = XEXP (memref, 0);
     /* During/after reload we may only check, never emit fixup code.  */
1920 if (reload_in_progress || reload_completed)
1922 if (! memory_address_p (mode, addr))
1926 addr = memory_address (mode, addr);
     /* Nothing changed: reuse MEMREF rather than allocate a twin.  */
1929 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1932 new = gen_rtx_MEM (mode, addr);
1933 MEM_COPY_ATTRIBUTES (new, memref);
1937 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1938 way we are changing MEMREF, so we only preserve the alias set. */
1941 change_address (memref, mode, addr)
1943 enum machine_mode mode;
1946 rtx new = change_address_1 (memref, mode, addr, 1);
1947 enum machine_mode mmode = GET_MODE (new);
     /* Drop expr/offset; keep only alias set plus mode-derived size
	and alignment (unknown for BLKmode).  */
1950 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1951 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1952 (mmode == BLKmode ? BITS_PER_UNIT
1953 : GET_MODE_ALIGNMENT (mmode)),
1959 /* Return a memory reference like MEMREF, but with its mode changed
1960 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1961 nonzero, the memory address is forced to be valid.
1962 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1963 and caller is responsible for adjusting MEMREF base register. */
1966 adjust_address_1 (memref, mode, offset, validate, adjust)
1968 enum machine_mode mode;
1969 HOST_WIDE_INT offset;
1970 int validate, adjust;
1972 rtx addr = XEXP (memref, 0);
1974 rtx memoffset = MEM_OFFSET (memref);
1976 unsigned int memalign = MEM_ALIGN (memref);
1978 /* ??? Prefer to create garbage instead of creating shared rtl.
1979 This may happen even if offset is non-zero -- consider
1980 (plus (plus reg reg) const_int) -- so do this always. */
1981 addr = copy_rtx (addr);
1985 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1986 object, we can merge it into the LO_SUM. */
1987 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1989 && (unsigned HOST_WIDE_INT) offset
1990 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1991 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1992 plus_constant (XEXP (addr, 1), offset));
1994 addr = plus_constant (addr, offset);
1997 new = change_address_1 (memref, mode, addr, validate);
1999 /* Compute the new values of the memory attributes due to this adjustment.
2000 We add the offsets and update the alignment. */
2002 memoffset = GEN_INT (offset + INTVAL (memoffset));
2004 /* Compute the new alignment by taking the MIN of the alignment and the
2005 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     (offset & -offset) isolates the lowest set bit of OFFSET.  */
2010 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2012 /* We can compute the size in a number of ways. */
2013 if (GET_MODE (new) != BLKmode)
2014 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2015 else if (MEM_SIZE (memref))
2016 size = plus_constant (MEM_SIZE (memref), -offset);
2018 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2019 memoffset, size, memalign, GET_MODE (new));
2021 /* At some point, we should validate that this offset is within the object,
2022 if all the appropriate values are known. */
2026 /* Return a memory reference like MEMREF, but with its mode changed
2027 to MODE and its address changed to ADDR, which is assumed to be
2028 MEMREF offseted by OFFSET bytes. If VALIDATE is
2029 nonzero, the memory address is forced to be valid. */
2032 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2034 enum machine_mode mode;
2036 HOST_WIDE_INT offset;
     /* Install the caller-supplied address first, then let
	adjust_address_1 (with ADJUST == 0) fix up only the attributes.  */
2039 memref = change_address_1 (memref, VOIDmode, addr, validate);
2040 return adjust_address_1 (memref, mode, offset, validate, 0);
2043 /* Return a memory reference like MEMREF, but whose address is changed by
2044 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2045 known to be in OFFSET (possibly 1). */
2048 offset_address (memref, offset, pow2)
2053 rtx new, addr = XEXP (memref, 0);
2055 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2057 /* At this point we don't know _why_ the address is invalid. It
2058 could have secondary memory refereces, multiplies or anything.
2060 However, if we did go and rearrange things, we can wind up not
2061 being able to recognize the magic around pic_offset_table_rtx.
2062 This stuff is fragile, and is yet another example of why it is
2063 bad to expose PIC machinery too early. */
2064 if (! memory_address_p (GET_MODE (memref), new)
2065 && GET_CODE (addr) == PLUS
2066 && XEXP (addr, 0) == pic_offset_table_rtx)
     /* Force the whole PIC base expression into a register and retry.  */
2068 addr = force_reg (GET_MODE (addr), addr);
2069 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2072 update_temp_slot_address (XEXP (memref, 0), new);
2073 new = change_address_1 (memref, VOIDmode, new, 1);
2075 /* Update the alignment to reflect the offset. Reset the offset, which
     The new alignment is at most POW2 bytes, the known offset factor.  */
2078 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2079 MIN (MEM_ALIGN (memref),
2080 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2085 /* Return a memory reference like MEMREF, but with its address changed to
2086 ADDR. The caller is asserting that the actual piece of memory pointed
2087 to is the same, just the form of the address is being changed, such as
2088 by putting something into a register. */
2091 replace_equiv_address (memref, addr)
2095 /* change_address_1 copies the memory attribute structure without change
2096 and that's exactly what we want here. */
2097 update_temp_slot_address (XEXP (memref, 0), addr);
2098 return change_address_1 (memref, VOIDmode, addr, 1);
2101 /* Likewise, but the reference is not required to be valid. */
2104 replace_equiv_address_nv (memref, addr)
     /* Same as replace_equiv_address but with VALIDATE == 0.  */
2108 return change_address_1 (memref, VOIDmode, addr, 0);
2111 /* Return a memory reference like MEMREF, but with its mode widened to
2112 MODE and offset by OFFSET. This would be used by targets that e.g.
2113 cannot issue QImode memory operations and have to use SImode memory
2114 operations plus masking logic. */
2117 widen_memory_access (memref, mode, offset)
2119 enum machine_mode mode;
2120 HOST_WIDE_INT offset;
2122 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2123 tree expr = MEM_EXPR (new);
2124 rtx memoffset = MEM_OFFSET (new);
2125 unsigned int size = GET_MODE_SIZE (mode);
2127 /* If we don't know what offset we were at within the expression, then
2128 we can't know if we've overstepped the bounds. */
     /* Walk outward through COMPONENT_REFs until the widened access is
	known to fit inside the referenced object.  */
2134 if (TREE_CODE (expr) == COMPONENT_REF)
2136 tree field = TREE_OPERAND (expr, 1);
2138 if (! DECL_SIZE_UNIT (field))
2144 /* Is the field at least as large as the access? If so, ok,
2145 otherwise strip back to the containing structure. */
2146 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2147 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2148 && INTVAL (memoffset) >= 0)
2151 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
     /* Move to the containing struct; fold the field's byte position
	into the running MEM offset.  */
2157 expr = TREE_OPERAND (expr, 0);
2158 memoffset = (GEN_INT (INTVAL (memoffset)
2159 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2160 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2163 /* Similarly for the decl. */
2164 else if (DECL_P (expr)
2165 && DECL_SIZE_UNIT (expr)
2166 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2167 && (! memoffset || INTVAL (memoffset) >= 0))
2171 /* The widened memory access overflows the expression, which means
2172 that it could alias another expression. Zap it. */
2179 memoffset = NULL_RTX;
2181 /* The widened memory may alias other stuff, so zap the alias set. */
2182 /* ??? Maybe use get_alias_set on any remaining expression. */
2184 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2185 MEM_ALIGN (new), mode);
2190 /* Return a newly created CODE_LABEL rtx with a unique label number. */
     /* label_num is the global counter declared earlier in this file;
	each label gets the next number.  */
2197 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2198 NULL_RTX, label_num++, NULL, NULL);
2200 LABEL_NUSES (label) = 0;
2201 LABEL_ALTERNATE_NAME (label) = NULL;
2205 /* For procedure integration. */
2207 /* Install new pointers to the first and last insns in the chain.
2208 Also, set cur_insn_uid to one higher than the last in use.
2209 Used for an inline-procedure after copying the insn chain. */
2212 set_new_first_and_last_insn (first, last)
     /* Scan the whole chain to find the largest UID in use.  */
2221 for (insn = first; insn; insn = NEXT_INSN (insn))
2222 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2227 /* Set the range of label numbers found in the current function.
2228 This is used when belatedly compiling an inline function. */
2231 set_new_first_and_last_label_num (first, last)
     /* Record the current global counter as the base for this function.  */
2234 base_label_num = label_num;
2235 first_label_num = first;
2236 last_label_num = last;
2239 /* Set the last label number found in the current function.
2240 This is used when belatedly compiling an inline function. */
2243 set_new_last_label_num (last)
     /* Like set_new_first_and_last_label_num but only the upper bound.  */
2246 base_label_num = label_num;
2247 last_label_num = last;
2250 /* Restore all variables describing the current status from the structure *P.
2251 This is used after a nested function. */
2254 restore_emit_status (p)
2255 struct function *p ATTRIBUTE_UNUSED;
     /* Only the RTL-generation caches need flushing; P itself is unused.  */
2258 clear_emit_caches ();
2261 /* Clear out all parts of the state in F that can safely be discarded
2262 after the function has been compiled, to let garbage collection
2263 reclaim the memory. */
2266 free_emit_status (f)
     /* Release the per-function register tables.  */
2269 free (f->emit->x_regno_reg_rtx);
2270 free (f->emit->regno_pointer_align);
2271 free (f->emit->regno_decl);
2276 /* Go through all the RTL insn bodies and copy any invalid shared
2277 structure. This routine should only be called once. */
2280 unshare_all_rtl (fndecl, insn)
2286 /* Make sure that virtual parameters are not shared. */
2287 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2288 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2290 /* Make sure that virtual stack slots are not shared. */
2291 unshare_all_decls (DECL_INITIAL (fndecl));
2293 /* Unshare just about everything else. */
2294 unshare_all_rtl_1 (insn);
2296 /* Make sure the addresses of stack slots found outside the insn chain
2297 (such as, in DECL_RTL of a variable) are not shared
2298 with the insn chain.
2300 This special care is necessary when the stack slot MEM does not
2301 actually appear in the insn chain. If it does appear, its address
2302 is unshared from all else at that point. */
2303 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2306 /* Go through all the RTL insn bodies and copy any invalid shared
2307 structure, again. This is a fairly expensive thing to do so it
2308 should be done sparingly. */
2311 unshare_all_rtl_again (insn)
     /* copy_rtx_if_shared relies on the `used' mark bits; clear them
	everywhere first so sharing is re-detected from scratch.  */
2317 for (p = insn; p; p = NEXT_INSN (p))
2320 reset_used_flags (PATTERN (p));
2321 reset_used_flags (REG_NOTES (p));
2322 reset_used_flags (LOG_LINKS (p));
2325 /* Make sure that virtual stack slots are not shared. */
2326 reset_used_decls (DECL_INITIAL (cfun->decl));
2328 /* Make sure that virtual parameters are not shared. */
2329 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2330 reset_used_flags (DECL_RTL (decl));
2332 reset_used_flags (stack_slot_list);
2334 unshare_all_rtl (cfun->decl, insn);
2337 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2338 Assumes the mark bits are cleared at entry. */
2341 unshare_all_rtl_1 (insn)
     /* Unshare pattern, notes and links of every insn in the chain.  */
2344 for (; insn; insn = NEXT_INSN (insn))
2347 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2348 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2349 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2353 /* Go through all virtual stack slots of a function and copy any
2354 shared structure. */
2356 unshare_all_decls (blk)
2361 /* Copy shared decls. */
2362 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2363 if (DECL_RTL_SET_P (t))
2364 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2366 /* Now process sub-blocks. */
     /* Recurse over the BLOCK tree.  */
2367 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2368 unshare_all_decls (t);
2371 /* Go through all virtual stack slots of a function and mark them as
2374 reset_used_decls (blk)
     /* Clear `used' mark bits on every DECL_RTL in this block.  */
2380 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2381 if (DECL_RTL_SET_P (t))
2382 reset_used_flags (DECL_RTL (t));
2384 /* Now process sub-blocks. */
     /* Recurse over the BLOCK tree, mirroring unshare_all_decls.  */
2385 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2386 reset_used_decls (t);
2389 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2390 placed in the result directly, rather than being copied. MAY_SHARE is
2391 either a MEM of an EXPR_LIST of MEMs. */
2394 copy_most_rtx (orig, may_share)
2401 const char *format_ptr;
     /* ORIG itself may be shared if it is MAY_SHARE or listed in it.  */
2403 if (orig == may_share
2404 || (GET_CODE (may_share) == EXPR_LIST
2405 && in_expr_list_p (may_share, orig)))
2408 code = GET_CODE (orig);
2426 copy = rtx_alloc (code);
2427 PUT_MODE (copy, GET_MODE (orig));
     /* Preserve the rtx status flags on the copy.  */
2428 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2429 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2430 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2431 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2432 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2434 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
     /* Copy each operand according to its format letter, recursing on
	'e' (rtx) and 'E' (rtvec) operands.  */
2436 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2438 switch (*format_ptr++)
2441 XEXP (copy, i) = XEXP (orig, i);
2442 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2443 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2447 XEXP (copy, i) = XEXP (orig, i);
2452 XVEC (copy, i) = XVEC (orig, i);
2453 if (XVEC (orig, i) != NULL)
2455 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2456 for (j = 0; j < XVECLEN (copy, i); j++)
2457 XVECEXP (copy, i, j)
2458 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2463 XWINT (copy, i) = XWINT (orig, i);
2468 XINT (copy, i) = XINT (orig, i);
2472 XTREE (copy, i) = XTREE (orig, i);
2477 XSTR (copy, i) = XSTR (orig, i);
2481 /* Copy this through the wide int field; that's safest. */
2482 X0WINT (copy, i) = X0WINT (orig, i);
2492 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2493 Recursively does the same for subexpressions. */
2496 copy_rtx_if_shared (orig)
2502 const char *format_ptr;
2508 code = GET_CODE (x);
2510 /* These types may be freely shared. */
2524       /* SCRATCH rtxs must be shared because they represent distinct values.  */
2528 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2529 a LABEL_REF, it isn't sharable. */
2530 if (GET_CODE (XEXP (x, 0)) == PLUS
2531 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2532 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2541 /* The chain of insns is not being copied. */
2545 /* A MEM is allowed to be shared if its address is constant.
2547 We used to allow sharing of MEMs which referenced
2548 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2549 that can lose. instantiate_virtual_regs will not unshare
2550 the MEMs, and combine may change the structure of the address
2551 because it looks safe and profitable in one context, but
2552 in some other context it creates unrecognizable RTL. */
2553 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2562 /* This rtx may not be shared. If it has already been seen,
2563 replace it with a copy of itself. */
2565 if (RTX_FLAG (x, used))
2569 copy = rtx_alloc (code);
2571 (sizeof (*copy) - sizeof (copy->fld)
2572 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2576 RTX_FLAG (x, used) = 1;
2578 /* Now scan the subexpressions recursively.
2579 We can store any replaced subexpressions directly into X
2580 since we know X is not shared! Any vectors in X
2581 must be copied if X was copied. */
2583 format_ptr = GET_RTX_FORMAT (code);
2585 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2587 switch (*format_ptr++)
2590 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2594 if (XVEC (x, i) != NULL)
2597 int len = XVECLEN (x, i);
2599 if (copied && len > 0)
2600 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2601 for (j = 0; j < len; j++)
2602 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2610 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2611 to look for shared sub-parts. */
2614 reset_used_flags (x)
2619 const char *format_ptr;
2624 code = GET_CODE (x);
2626 /* These types may be freely shared so we needn't do any resetting
2648 /* The chain of insns is not being copied. */
2655 RTX_FLAG (x, used) = 0;
2657 format_ptr = GET_RTX_FORMAT (code);
2658 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2660 switch (*format_ptr++)
2663 reset_used_flags (XEXP (x, i));
2667 for (j = 0; j < XVECLEN (x, i); j++)
2668 reset_used_flags (XVECEXP (x, i, j));
2674 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2675 Return X or the rtx for the pseudo reg the value of X was copied into.
2676 OTHER must be valid as a SET_DEST. */
2679 make_safe_from (x, other)
2683 switch (GET_CODE (other))
2686 other = SUBREG_REG (other);
2688 case STRICT_LOW_PART:
2691 other = XEXP (other, 0);
2697 if ((GET_CODE (other) == MEM
2699 && GET_CODE (x) != REG
2700 && GET_CODE (x) != SUBREG)
2701 || (GET_CODE (other) == REG
2702 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2703 || reg_mentioned_p (other, x))))
2705 rtx temp = gen_reg_rtx (GET_MODE (x));
2706 emit_move_insn (temp, x);
2712 /* Emission of insns (adding them to the doubly-linked list). */
2714 /* Return the first insn of the current sequence or current function. */
2722 /* Specify a new insn as the first in the chain. */
2725 set_first_insn (insn)
2728 if (PREV_INSN (insn) != 0)
2733 /* Return the last insn emitted in current sequence or current function. */
2741 /* Specify a new insn as the last in the chain. */
2744 set_last_insn (insn)
2747 if (NEXT_INSN (insn) != 0)
2752 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2755 get_last_insn_anywhere ()
2757 struct sequence_stack *stack;
2760 for (stack = seq_stack; stack; stack = stack->next)
2761 if (stack->last != 0)
2766 /* Return a number larger than any instruction's uid in this function. */
2771 return cur_insn_uid;
2774 /* Renumber instructions so that no instruction UIDs are wasted. */
2777 renumber_insns (stream)
2782 /* If we're not supposed to renumber instructions, don't. */
2783 if (!flag_renumber_insns)
2786 /* If there aren't that many instructions, then it's not really
2787 worth renumbering them. */
2788 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2793 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2796 fprintf (stream, "Renumbering insn %d to %d\n",
2797 INSN_UID (insn), cur_insn_uid);
2798 INSN_UID (insn) = cur_insn_uid++;
2802 /* Return the next insn. If it is a SEQUENCE, return the first insn
2811 insn = NEXT_INSN (insn);
2812 if (insn && GET_CODE (insn) == INSN
2813 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2814 insn = XVECEXP (PATTERN (insn), 0, 0);
2820 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2824 previous_insn (insn)
2829 insn = PREV_INSN (insn);
2830 if (insn && GET_CODE (insn) == INSN
2831 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2832 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2838 /* Return the next insn after INSN that is not a NOTE. This routine does not
2839 look inside SEQUENCEs. */
2842 next_nonnote_insn (insn)
2847 insn = NEXT_INSN (insn);
2848 if (insn == 0 || GET_CODE (insn) != NOTE)
2855 /* Return the previous insn before INSN that is not a NOTE. This routine does
2856 not look inside SEQUENCEs. */
2859 prev_nonnote_insn (insn)
2864 insn = PREV_INSN (insn);
2865 if (insn == 0 || GET_CODE (insn) != NOTE)
2872 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2873 or 0, if there is none. This routine does not look inside
2877 next_real_insn (insn)
2882 insn = NEXT_INSN (insn);
2883 if (insn == 0 || GET_CODE (insn) == INSN
2884 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2891 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2892 or 0, if there is none. This routine does not look inside
2896 prev_real_insn (insn)
2901 insn = PREV_INSN (insn);
2902 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2903 || GET_CODE (insn) == JUMP_INSN)
2910 /* Find the next insn after INSN that really does something. This routine
2911 does not look inside SEQUENCEs. Until reload has completed, this is the
2912 same as next_real_insn. */
2915 active_insn_p (insn)
2918 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2919 || (GET_CODE (insn) == INSN
2920 && (! reload_completed
2921 || (GET_CODE (PATTERN (insn)) != USE
2922 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2926 next_active_insn (insn)
2931 insn = NEXT_INSN (insn);
2932 if (insn == 0 || active_insn_p (insn))
2939 /* Find the last insn before INSN that really does something. This routine
2940 does not look inside SEQUENCEs. Until reload has completed, this is the
2941 same as prev_real_insn. */
2944 prev_active_insn (insn)
2949 insn = PREV_INSN (insn);
2950 if (insn == 0 || active_insn_p (insn))
2957 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2965 insn = NEXT_INSN (insn);
2966 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2973 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2981 insn = PREV_INSN (insn);
2982 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2990 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2991 and REG_CC_USER notes so we can find it. */
2994 link_cc0_insns (insn)
2997 rtx user = next_nonnote_insn (insn);
2999 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3000 user = XVECEXP (PATTERN (user), 0, 0);
3002 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3004 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3007 /* Return the next insn that uses CC0 after INSN, which is assumed to
3008 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3009 applied to the result of this function should yield INSN).
3011 Normally, this is simply the next insn. However, if a REG_CC_USER note
3012 is present, it contains the insn that uses CC0.
3014 Return 0 if we can't find the insn. */
3017 next_cc0_user (insn)
3020 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3023 return XEXP (note, 0);
3025 insn = next_nonnote_insn (insn);
3026 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3027 insn = XVECEXP (PATTERN (insn), 0, 0);
3029 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3035 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3036 note, it is the previous insn. */
3039 prev_cc0_setter (insn)
3042 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3045 return XEXP (note, 0);
3047 insn = prev_nonnote_insn (insn);
3048 if (! sets_cc0_p (PATTERN (insn)))
3055 /* Increment the label uses for all labels present in rtx. */
3058 mark_label_nuses (x)
3065 code = GET_CODE (x);
3066 if (code == LABEL_REF)
3067 LABEL_NUSES (XEXP (x, 0))++;
3069 fmt = GET_RTX_FORMAT (code);
3070 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3073 mark_label_nuses (XEXP (x, i));
3074 else if (fmt[i] == 'E')
3075 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3076 mark_label_nuses (XVECEXP (x, i, j));
3081 /* Try splitting insns that can be split for better scheduling.
3082 PAT is the pattern which might split.
3083 TRIAL is the insn providing PAT.
3084 LAST is non-zero if we should return the last insn of the sequence produced.
3086 If this routine succeeds in splitting, it returns the first or last
3087 replacement insn depending on the value of LAST. Otherwise, it
3088 returns TRIAL. If the insn to be returned can be split, it will be. */
3091 try_split (pat, trial, last)
3095 rtx before = PREV_INSN (trial);
3096 rtx after = NEXT_INSN (trial);
3097 int has_barrier = 0;
3102 if (any_condjump_p (trial)
3103 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3104 split_branch_probability = INTVAL (XEXP (note, 0));
3105 probability = split_branch_probability;
3107 seq = split_insns (pat, trial);
3109 split_branch_probability = -1;
3111 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3112 We may need to handle this specially. */
3113 if (after && GET_CODE (after) == BARRIER)
3116 after = NEXT_INSN (after);
3121 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3122 The latter case will normally arise only when being done so that
3123 it, in turn, will be split (SFmode on the 29k is an example). */
3124 if (GET_CODE (seq) == SEQUENCE)
3128 /* Avoid infinite loop if any insn of the result matches
3129 the original pattern. */
3130 for (i = 0; i < XVECLEN (seq, 0); i++)
3131 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3132 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3136 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3137 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3139 rtx insn = XVECEXP (seq, 0, i);
3140 mark_jump_label (PATTERN (insn),
3141 XVECEXP (seq, 0, i), 0);
3143 if (probability != -1
3144 && any_condjump_p (insn)
3145 && !find_reg_note (insn, REG_BR_PROB, 0))
3147 /* We can preserve the REG_BR_PROB notes only if exactly
3148 one jump is created, otherwise the machine description
3149 is responsible for this step using
3150 split_branch_probability variable. */
3154 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3155 GEN_INT (probability),
3160 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3161 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3162 if (GET_CODE (trial) == CALL_INSN)
3163 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3164 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3165 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3166 = CALL_INSN_FUNCTION_USAGE (trial);
3168 /* Copy notes, particularly those related to the CFG. */
3169 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3171 switch (REG_NOTE_KIND (note))
3174 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3176 rtx insn = XVECEXP (seq, 0, i);
3177 if (GET_CODE (insn) == CALL_INSN
3178 || (flag_non_call_exceptions
3179 && may_trap_p (PATTERN (insn))))
3181 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3189 case REG_ALWAYS_RETURN:
3190 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3192 rtx insn = XVECEXP (seq, 0, i);
3193 if (GET_CODE (insn) == CALL_INSN)
3195 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3201 case REG_NON_LOCAL_GOTO:
3202 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3204 rtx insn = XVECEXP (seq, 0, i);
3205 if (GET_CODE (insn) == JUMP_INSN)
3207 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3218 /* If there are LABELS inside the split insns increment the
3219 usage count so we don't delete the label. */
3220 if (GET_CODE (trial) == INSN)
3221 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3222 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3223 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3225 tem = emit_insn_after (seq, trial);
3227 delete_insn (trial);
3229 emit_barrier_after (tem);
3231 /* Recursively call try_split for each new insn created; by the
3232 time control returns here that insn will be fully split, so
3233 set LAST and continue from the insn after the one returned.
3234 We can't use next_active_insn here since AFTER may be a note.
3235	 Ignore deleted insns, which can occur if not optimizing.  */
3236 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3237 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3238 tem = try_split (PATTERN (tem), tem, 1);
3240 /* Avoid infinite loop if the result matches the original pattern. */
3241 else if (rtx_equal_p (seq, pat))
3245 PATTERN (trial) = seq;
3246 INSN_CODE (trial) = -1;
3247 try_split (seq, trial, last);
3250 /* Return either the first or the last insn, depending on which was
3253 ? (after ? PREV_INSN (after) : last_insn)
3254 : NEXT_INSN (before);
3260 /* Make and return an INSN rtx, initializing all its slots.
3261 Store PATTERN in the pattern slots. */
3264 make_insn_raw (pattern)
3269 insn = rtx_alloc (INSN);
3271 INSN_UID (insn) = cur_insn_uid++;
3272 PATTERN (insn) = pattern;
3273 INSN_CODE (insn) = -1;
3274 LOG_LINKS (insn) = NULL;
3275 REG_NOTES (insn) = NULL;
3277 #ifdef ENABLE_RTL_CHECKING
3280 && (returnjump_p (insn)
3281 || (GET_CODE (insn) == SET
3282 && SET_DEST (insn) == pc_rtx)))
3284 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3292 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3295 make_jump_insn_raw (pattern)
3300 insn = rtx_alloc (JUMP_INSN);
3301 INSN_UID (insn) = cur_insn_uid++;
3303 PATTERN (insn) = pattern;
3304 INSN_CODE (insn) = -1;
3305 LOG_LINKS (insn) = NULL;
3306 REG_NOTES (insn) = NULL;
3307 JUMP_LABEL (insn) = NULL;
3312 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3315 make_call_insn_raw (pattern)
3320 insn = rtx_alloc (CALL_INSN);
3321 INSN_UID (insn) = cur_insn_uid++;
3323 PATTERN (insn) = pattern;
3324 INSN_CODE (insn) = -1;
3325 LOG_LINKS (insn) = NULL;
3326 REG_NOTES (insn) = NULL;
3327 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3332 /* Add INSN to the end of the doubly-linked list.
3333 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3339 PREV_INSN (insn) = last_insn;
3340 NEXT_INSN (insn) = 0;
3342 if (NULL != last_insn)
3343 NEXT_INSN (last_insn) = insn;
3345 if (NULL == first_insn)
3351 /* Add INSN into the doubly-linked list after insn AFTER. This and
3352 the next should be the only functions called to insert an insn once
3353 delay slots have been filled since only they know how to update a
3357 add_insn_after (insn, after)
3360 rtx next = NEXT_INSN (after);
3363 if (optimize && INSN_DELETED_P (after))
3366 NEXT_INSN (insn) = next;
3367 PREV_INSN (insn) = after;
3371 PREV_INSN (next) = insn;
3372 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3373 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3375 else if (last_insn == after)
3379 struct sequence_stack *stack = seq_stack;
3380 /* Scan all pending sequences too. */
3381 for (; stack; stack = stack->next)
3382 if (after == stack->last)
3392 if (basic_block_for_insn
3393 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3394 && (bb = BLOCK_FOR_INSN (after)))
3396 set_block_for_insn (insn, bb);
3398 bb->flags |= BB_DIRTY;
3399 /* Should not happen as first in the BB is always
3400 either NOTE or LABEL. */
3401 if (bb->end == after
3402 /* Avoid clobbering of structure when creating new BB. */
3403 && GET_CODE (insn) != BARRIER
3404 && (GET_CODE (insn) != NOTE
3405 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3409 NEXT_INSN (after) = insn;
3410 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3412 rtx sequence = PATTERN (after);
3413 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3417 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3418 the previous should be the only functions called to insert an insn once
3419 delay slots have been filled since only they know how to update a
3423 add_insn_before (insn, before)
3426 rtx prev = PREV_INSN (before);
3429 if (optimize && INSN_DELETED_P (before))
3432 PREV_INSN (insn) = prev;
3433 NEXT_INSN (insn) = before;
3437 NEXT_INSN (prev) = insn;
3438 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3440 rtx sequence = PATTERN (prev);
3441 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3444 else if (first_insn == before)
3448 struct sequence_stack *stack = seq_stack;
3449 /* Scan all pending sequences too. */
3450 for (; stack; stack = stack->next)
3451 if (before == stack->first)
3453 stack->first = insn;
3461 if (basic_block_for_insn
3462 && (unsigned int) INSN_UID (before) < basic_block_for_insn->num_elements
3463 && (bb = BLOCK_FOR_INSN (before)))
3465 set_block_for_insn (insn, bb);
3467 bb->flags |= BB_DIRTY;
3468 /* Should not happen as first in the BB is always
3469	 either NOTE or LABEL.  */
3470 if (bb->head == insn
3471 /* Avoid clobbering of structure when creating new BB. */
3472 && GET_CODE (insn) != BARRIER
3473 && (GET_CODE (insn) != NOTE
3474 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3478 PREV_INSN (before) = insn;
3479 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3480 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3483 /* Remove an insn from its doubly-linked list. This function knows how
3484 to handle sequences. */
3489 rtx next = NEXT_INSN (insn);
3490 rtx prev = PREV_INSN (insn);
3495 NEXT_INSN (prev) = next;
3496 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3498 rtx sequence = PATTERN (prev);
3499 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3502 else if (first_insn == insn)
3506 struct sequence_stack *stack = seq_stack;
3507 /* Scan all pending sequences too. */
3508 for (; stack; stack = stack->next)
3509 if (insn == stack->first)
3511 stack->first = next;
3521 PREV_INSN (next) = prev;
3522 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3523 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3525 else if (last_insn == insn)
3529 struct sequence_stack *stack = seq_stack;
3530 /* Scan all pending sequences too. */
3531 for (; stack; stack = stack->next)
3532 if (insn == stack->last)
3541 if (basic_block_for_insn
3542 && (unsigned int) INSN_UID (insn) < basic_block_for_insn->num_elements
3543 && (bb = BLOCK_FOR_INSN (insn)))
3546 bb->flags |= BB_DIRTY;
3547 if (bb->head == insn)
3549 /* Never ever delete the basic block note without deleting whole
3551 if (GET_CODE (insn) == NOTE)
3555 if (bb->end == insn)
3560 /* Delete all insns made since FROM.
3561 FROM becomes the new last instruction. */
3564 delete_insns_since (from)
3570 NEXT_INSN (from) = 0;
3574 /* This function is deprecated, please use sequences instead.
3576 Move a consecutive bunch of insns to a different place in the chain.
3577 The insns to be moved are those between FROM and TO.
3578 They are moved to a new position after the insn AFTER.
3579 AFTER must not be FROM or TO or any insn in between.
3581 This function does not know about SEQUENCEs and hence should not be
3582 called after delay-slot filling has been done. */
3585 reorder_insns_nobb (from, to, after)
3586 rtx from, to, after;
3588 /* Splice this bunch out of where it is now. */
3589 if (PREV_INSN (from))
3590 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3592 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3593 if (last_insn == to)
3594 last_insn = PREV_INSN (from);
3595 if (first_insn == from)
3596 first_insn = NEXT_INSN (to);
3598 /* Make the new neighbors point to it and it to them. */
3599 if (NEXT_INSN (after))
3600 PREV_INSN (NEXT_INSN (after)) = to;
3602 NEXT_INSN (to) = NEXT_INSN (after);
3603 PREV_INSN (from) = after;
3604 NEXT_INSN (after) = from;
3605 if (after == last_insn)
3609 /* Same as function above, but take care to update BB boundaries. */
3611 reorder_insns (from, to, after)
3612 rtx from, to, after;
3614 rtx prev = PREV_INSN (from);
3615 basic_block bb, bb2;
3617 reorder_insns_nobb (from, to, after);
3619 if (basic_block_for_insn
3620 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3621 && (bb = BLOCK_FOR_INSN (after)))
3624 bb->flags |= BB_DIRTY;
3626 if (basic_block_for_insn
3627 && ((unsigned int) INSN_UID (from)
3628 < basic_block_for_insn->num_elements)
3629 && (bb2 = BLOCK_FOR_INSN (from)))
3633 bb2->flags |= BB_DIRTY;
3636 if (bb->end == after)
3639 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3640 set_block_for_insn (x, bb);
3644 /* Return the line note insn preceding INSN. */
3647 find_line_note (insn)
3650 if (no_line_numbers)
3653 for (; insn; insn = PREV_INSN (insn))
3654 if (GET_CODE (insn) == NOTE
3655 && NOTE_LINE_NUMBER (insn) >= 0)
3661 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3662 of the moved insns when debugging. This may insert a note between AFTER
3663 and FROM, and another one after TO. */
3666 reorder_insns_with_line_notes (from, to, after)
3667 rtx from, to, after;
3669 rtx from_line = find_line_note (from);
3670 rtx after_line = find_line_note (after);
3672 reorder_insns (from, to, after);
3674 if (from_line == after_line)
3678 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3679 NOTE_LINE_NUMBER (from_line),
3682 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3683 NOTE_LINE_NUMBER (after_line),
3687 /* Remove unnecessary notes from the instruction stream. */
3690 remove_unnecessary_notes ()
3692 rtx block_stack = NULL_RTX;
3693 rtx eh_stack = NULL_RTX;
3698 /* We must not remove the first instruction in the function because
3699 the compiler depends on the first instruction being a note. */
3700 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3702 /* Remember what's next. */
3703 next = NEXT_INSN (insn);
3705 /* We're only interested in notes. */
3706 if (GET_CODE (insn) != NOTE)
3709 switch (NOTE_LINE_NUMBER (insn))
3711 case NOTE_INSN_DELETED:
3712 case NOTE_INSN_LOOP_END_TOP_COND:
3716 case NOTE_INSN_EH_REGION_BEG:
3717 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3720 case NOTE_INSN_EH_REGION_END:
3721 /* Too many end notes. */
3722 if (eh_stack == NULL_RTX)
3724 /* Mismatched nesting. */
3725 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3728 eh_stack = XEXP (eh_stack, 1);
3729 free_INSN_LIST_node (tmp);
3732 case NOTE_INSN_BLOCK_BEG:
3733 /* By now, all notes indicating lexical blocks should have
3734 NOTE_BLOCK filled in. */
3735 if (NOTE_BLOCK (insn) == NULL_TREE)
3737 block_stack = alloc_INSN_LIST (insn, block_stack);
3740 case NOTE_INSN_BLOCK_END:
3741 /* Too many end notes. */
3742 if (block_stack == NULL_RTX)
3744 /* Mismatched nesting. */
3745 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3748 block_stack = XEXP (block_stack, 1);
3749 free_INSN_LIST_node (tmp);
3751 /* Scan back to see if there are any non-note instructions
3752 between INSN and the beginning of this block. If not,
3753 then there is no PC range in the generated code that will
3754 actually be in this block, so there's no point in
3755 remembering the existence of the block. */
3756 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3758 /* This block contains a real instruction. Note that we
3759 don't include labels; if the only thing in the block
3760 is a label, then there are still no PC values that
3761 lie within the block. */
3765 /* We're only interested in NOTEs. */
3766 if (GET_CODE (tmp) != NOTE)
3769 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3771 /* We just verified that this BLOCK matches us with
3772 the block_stack check above. Never delete the
3773 BLOCK for the outermost scope of the function; we
3774 can refer to names from that scope even if the
3775 block notes are messed up. */
3776 if (! is_body_block (NOTE_BLOCK (insn))
3777 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3784 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3785 /* There's a nested block. We need to leave the
3786 current block in place since otherwise the debugger
3787 wouldn't be able to show symbols from our block in
3788 the nested block. */
3794 /* Too many begin notes. */
3795 if (block_stack || eh_stack)
3800 /* Emit an insn of given code and pattern
3801 at a specified place within the doubly-linked list. */
3803 /* Make an instruction with body PATTERN
3804 and output it before the instruction BEFORE. */
3807 emit_insn_before (pattern, before)
3808 rtx pattern, before;
3812 if (GET_CODE (pattern) == SEQUENCE)
3816 for (i = 0; i < XVECLEN (pattern, 0); i++)
3818 insn = XVECEXP (pattern, 0, i);
3819 add_insn_before (insn, before);
3824 insn = make_insn_raw (pattern);
3825 add_insn_before (insn, before);
3831 /* Make an instruction with body PATTERN and code JUMP_INSN
3832 and output it before the instruction BEFORE. */
3835 emit_jump_insn_before (pattern, before)
3836 rtx pattern, before;
3840 if (GET_CODE (pattern) == SEQUENCE)
3841 insn = emit_insn_before (pattern, before);
3844 insn = make_jump_insn_raw (pattern);
3845 add_insn_before (insn, before);
3851 /* Make an instruction with body PATTERN and code CALL_INSN
3852 and output it before the instruction BEFORE. */
3855 emit_call_insn_before (pattern, before)
3856 rtx pattern, before;
3860 if (GET_CODE (pattern) == SEQUENCE)
3861 insn = emit_insn_before (pattern, before);
3864 insn = make_call_insn_raw (pattern);
3865 add_insn_before (insn, before);
3866 PUT_CODE (insn, CALL_INSN);
3872 /* Make an instruction with body PATTERN and code CALL_INSN
3873 and output it before the instruction BEFORE. */
3876 emit_call_insn_after (pattern, before)
3877 rtx pattern, before;
3881 if (GET_CODE (pattern) == SEQUENCE)
3882 insn = emit_insn_after (pattern, before);
3885 insn = make_call_insn_raw (pattern);
3886 add_insn_after (insn, before);
3887 PUT_CODE (insn, CALL_INSN);
3893 /* Make an insn of code BARRIER
3894 and output it before the insn BEFORE. */
3897 emit_barrier_before (before)
3900 rtx insn = rtx_alloc (BARRIER);
3902 INSN_UID (insn) = cur_insn_uid++;
3904 add_insn_before (insn, before);
3908 /* Emit the label LABEL before the insn BEFORE. */
3911 emit_label_before (label, before)
3914 /* This can be called twice for the same label as a result of the
3915 confusion that follows a syntax error! So make it harmless. */
3916 if (INSN_UID (label) == 0)
3918 INSN_UID (label) = cur_insn_uid++;
3919 add_insn_before (label, before);
3925 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3928 emit_note_before (subtype, before)
3932 rtx note = rtx_alloc (NOTE);
3933 INSN_UID (note) = cur_insn_uid++;
3934 NOTE_SOURCE_FILE (note) = 0;
3935 NOTE_LINE_NUMBER (note) = subtype;
3937 add_insn_before (note, before);
3941 /* Make an insn of code INSN with body PATTERN
3942 and output it after the insn AFTER. */
3945 emit_insn_after (pattern, after)
3950 if (GET_CODE (pattern) == SEQUENCE)
3954 for (i = 0; i < XVECLEN (pattern, 0); i++)
3956 insn = XVECEXP (pattern, 0, i);
3957 add_insn_after (insn, after);
3963 insn = make_insn_raw (pattern);
3964 add_insn_after (insn, after);
3970 /* Similar to emit_insn_after, except that line notes are to be inserted so
3971 as to act as if this insn were at FROM. */
3974 emit_insn_after_with_line_notes (pattern, after, from)
3975 rtx pattern, after, from;
3977 rtx from_line = find_line_note (from);
3978 rtx after_line = find_line_note (after);
3979 rtx insn = emit_insn_after (pattern, after);
3982 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3983 NOTE_LINE_NUMBER (from_line),
3987 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3988 NOTE_LINE_NUMBER (after_line),
3992 /* Make an insn of code JUMP_INSN with body PATTERN
3993 and output it after the insn AFTER. */
3996 emit_jump_insn_after (pattern, after)
4001 if (GET_CODE (pattern) == SEQUENCE)
4002 insn = emit_insn_after (pattern, after);
4005 insn = make_jump_insn_raw (pattern);
4006 add_insn_after (insn, after);
4012 /* Make an insn of code BARRIER
4013 and output it after the insn AFTER. */
4016 emit_barrier_after (after)
4019 rtx insn = rtx_alloc (BARRIER);
4021 INSN_UID (insn) = cur_insn_uid++;
4023 add_insn_after (insn, after);
4027 /* Emit the label LABEL after the insn AFTER. */
4030 emit_label_after (label, after)
4033 /* This can be called twice for the same label
4034 as a result of the confusion that follows a syntax error!
4035 So make it harmless. */
4036 if (INSN_UID (label) == 0)
4038 INSN_UID (label) = cur_insn_uid++;
4039 add_insn_after (label, after);
4045 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4048 emit_note_after (subtype, after)
4052 rtx note = rtx_alloc (NOTE);
4053 INSN_UID (note) = cur_insn_uid++;
4054 NOTE_SOURCE_FILE (note) = 0;
4055 NOTE_LINE_NUMBER (note) = subtype;
4056 add_insn_after (note, after);
4060 /* Emit a line note for FILE and LINE after the insn AFTER. */
4063 emit_line_note_after (file, line, after)
4070 if (no_line_numbers && line > 0)
4076 note = rtx_alloc (NOTE);
4077 INSN_UID (note) = cur_insn_uid++;
4078 NOTE_SOURCE_FILE (note) = file;
4079 NOTE_LINE_NUMBER (note) = line;
4080 add_insn_after (note, after);
4084 /* Make an insn of code INSN with pattern PATTERN
4085 and add it to the end of the doubly-linked list.
4086 If PATTERN is a SEQUENCE, take the elements of it
4087 and emit an insn for each element.
4089 Returns the last insn emitted. */
4095 rtx insn = last_insn;
4097 if (GET_CODE (pattern) == SEQUENCE)
4101 for (i = 0; i < XVECLEN (pattern, 0); i++)
4103 insn = XVECEXP (pattern, 0, i);
4109 insn = make_insn_raw (pattern);
4116 /* Emit the insns in a chain starting with INSN.
4117 Return the last insn emitted. */
4127 rtx next = NEXT_INSN (insn);
4136 /* Emit the insns in a chain starting with INSN and place them in front of
4137 the insn BEFORE. Return the last insn emitted. */
4140 emit_insns_before (insn, before)
4148 rtx next = NEXT_INSN (insn);
4149 add_insn_before (insn, before);
4157 /* Emit the insns in a chain starting with FIRST and place them in back of
4158 the insn AFTER. Return the last insn emitted. */
4161 emit_insns_after (first, after)
4175 if (basic_block_for_insn
4176 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
4177 && (bb = BLOCK_FOR_INSN (after)))
4179 bb->flags |= BB_DIRTY;
4180 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4181 set_block_for_insn (last, bb);
4182 set_block_for_insn (last, bb);
4183 if (bb->end == after)
4187 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4190 after_after = NEXT_INSN (after);
4192 NEXT_INSN (after) = first;
4193 PREV_INSN (first) = after;
4194 NEXT_INSN (last) = after_after;
4196 PREV_INSN (after_after) = last;
4198 if (after == last_insn)
4203 /* Make an insn of code JUMP_INSN with pattern PATTERN
4204 and add it to the end of the doubly-linked list. */
4207 emit_jump_insn (pattern)
/* A SEQUENCE is handed to emit_insn, which unpacks it element by
   element; only a plain pattern gets a JUMP_INSN wrapper here.  */
4210 if (GET_CODE (pattern) == SEQUENCE)
4211 return emit_insn (pattern);
4214 rtx insn = make_jump_insn_raw (pattern);
4220 /* Make an insn of code CALL_INSN with pattern PATTERN
4221 and add it to the end of the doubly-linked list. */
4224 emit_call_insn (pattern)
/* As in emit_jump_insn, SEQUENCEs are delegated to emit_insn.  */
4227 if (GET_CODE (pattern) == SEQUENCE)
4228 return emit_insn (pattern);
4231 rtx insn = make_call_insn_raw (pattern);
4233 PUT_CODE (insn, CALL_INSN);
4238 /* Add the label LABEL to the end of the doubly-linked list. */
4244 /* This can be called twice for the same label
4245 as a result of the confusion that follows a syntax error!
4246 So make it harmless. */
/* A zero UID means the label has not been added yet; only then do we
   assign a fresh UID (and, in the elided lines, link it in).  */
4247 if (INSN_UID (label) == 0)
4249 INSN_UID (label) = cur_insn_uid++;
4255 /* Make an insn of code BARRIER
4256 and add it to the end of the doubly-linked list. */
4261 rtx barrier = rtx_alloc (BARRIER);
4262 INSN_UID (barrier) = cur_insn_uid++;
4267 /* Make an insn of code NOTE
4268 with data-fields specified by FILE and LINE
4269 and add it to the end of the doubly-linked list,
4270 but only if line-numbers are desired for debugging info. */
4273 emit_line_note (file, line)
/* Record the position on the current statement regardless of whether a
   NOTE will actually be emitted.  */
4277 set_file_and_line_for_stmt (file, line);
/* When line numbers are suppressed, no NOTE is created (the elided
   lines presumably return early here -- TODO confirm).  */
4280 if (no_line_numbers)
4284 return emit_note (file, line);
4287 /* Make an insn of code NOTE
4288 with data-fields specified by FILE and LINE
4289 and add it to the end of the doubly-linked list.
4290 If it is a line-number NOTE, omit it if it matches the previous one. */
4293 emit_note (file, line)
/* Suppress a line note identical to the previous one.  */
4301 if (file && last_filename && !strcmp (file, last_filename)
4302 && line == last_linenum)
/* Remember the position so the next call can detect duplicates.  */
4304 last_filename = file;
4305 last_linenum = line;
/* Positive LINE means this is a line-number note; those are dropped
   entirely when line numbers are disabled.  */
4308 if (no_line_numbers && line > 0)
4314 note = rtx_alloc (NOTE);
4315 INSN_UID (note) = cur_insn_uid++;
4316 NOTE_SOURCE_FILE (note) = file;
4317 NOTE_LINE_NUMBER (note) = line;
4322 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4325 emit_line_note_force (file, line)
/* The elided lines presumably clear the duplicate-suppression state
   (last_linenum/last_filename) before delegating -- TODO confirm.  */
4330 return emit_line_note (file, line);
4333 /* Cause next statement to emit a line note even if the line number
4334 has not changed. This is used at the beginning of a function. */
4337 force_next_line_note ()
4342 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4343 note of this type already exists, remove it first. */
4346 set_unique_reg_note (insn, kind, datum)
/* Look for an existing note of this KIND; if found, the elided code
   reuses it below instead of allocating a new EXPR_LIST.  */
4351 rtx note = find_reg_note (insn, kind, NULL_RTX);
4357 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4358 has multiple sets (some callers assume single_set
4359 means the insn only has one set, when in fact it
4360 means the insn only has one * useful * set). */
4361 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4368 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4369 It serves no useful purpose and breaks eliminate_regs. */
4370 if (GET_CODE (datum) == ASM_OPERANDS)
/* Existing note: just replace its datum in place.  */
4380 XEXP (note, 0) = datum;
/* No existing note: prepend a fresh one to the insn's note list.  */
4384 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4385 return REG_NOTES (insn);
4388 /* Return an indication of which type of insn should have X as a body.
4389 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4395 if (GET_CODE (x) == CODE_LABEL)
4397 if (GET_CODE (x) == CALL)
4399 if (GET_CODE (x) == RETURN)
4401 if (GET_CODE (x) == SET)
/* A SET of the pc is a jump; a SET whose source is a CALL is a call.  */
4403 if (SET_DEST (x) == pc_rtx)
4405 else if (GET_CODE (SET_SRC (x)) == CALL)
/* For a PARALLEL, scan the elements and classify by the first
   CALL / pc-SET / call-SET found.  */
4410 if (GET_CODE (x) == PARALLEL)
4413 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4414 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4416 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4417 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4419 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4420 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4426 /* Emit the rtl pattern X as an appropriate kind of insn.
4427 If X is a label, it is simply added into the insn chain. */
4433 enum rtx_code code = classify_insn (x);
/* Dispatch on the classification to the matching emitter.  */
4435 if (code == CODE_LABEL)
4436 return emit_label (x);
4437 else if (code == INSN)
4438 return emit_insn (x);
4439 else if (code == JUMP_INSN)
4441 rtx insn = emit_jump_insn (x);
/* An unconditional jump or RETURN ends the flow, so follow it with
   a BARRIER.  */
4442 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4443 return emit_barrier ();
4446 else if (code == CALL_INSN)
4447 return emit_call_insn (x);
4452 /* Begin emitting insns to a sequence which can be packaged in an
4453 RTL_EXPR. If this sequence will contain something that might cause
4454 the compiler to pop arguments to function calls (because those
4455 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4456 details), use do_pending_stack_adjust before calling this function.
4457 That will ensure that the deferred pops are not accidentally
4458 emitted in the middle of this sequence. */
4463 struct sequence_stack *tem;
/* Push the current emission state (chain head/tail and RTL_EXPR) onto
   the sequence stack so end_sequence can restore it.  */
4465 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4467 tem->next = seq_stack;
4468 tem->first = first_insn;
4469 tem->last = last_insn;
4470 tem->sequence_rtl_expr = seq_rtl_expr;
4478 /* Similarly, but indicate that this sequence will be placed in T, an
4479 RTL_EXPR. See the documentation for start_sequence for more
4480 information about how to use this function. */
4483 start_sequence_for_rtl_expr (t)
4491 /* Set up the insn chain starting with FIRST as the current sequence,
4492 saving the previously current one. See the documentation for
4493 start_sequence for more information about how to use this function. */
4496 push_to_sequence (first)
/* Walk to the end of the given chain so last_insn can be set; the
   empty loop body is intentional.  */
4503 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4509 /* Set up the insn chain from a chain starting in FIRST to LAST. */
4512 push_to_full_sequence (first, last)
4518 /* We really should have the end of the insn chain here. */
4519 if (last && NEXT_INSN (last))
4523 /* Set up the outer-level insn chain
4524 as the current sequence, saving the previously current one. */
4527 push_topmost_sequence ()
4529 struct sequence_stack *stack, *top = NULL;
/* Walk to the bottom of the sequence stack; TOP ends up as the
   outermost saved state.  */
4533 for (stack = seq_stack; stack; stack = stack->next)
4536 first_insn = top->first;
4537 last_insn = top->last;
4538 seq_rtl_expr = top->sequence_rtl_expr;
4541 /* After emitting to the outer-level insn chain, update the outer-level
4542 insn chain, and restore the previous saved state. */
4545 pop_topmost_sequence ()
4547 struct sequence_stack *stack, *top = NULL;
/* Find the outermost stack entry, mirroring push_topmost_sequence.  */
4549 for (stack = seq_stack; stack; stack = stack->next)
/* Write the (possibly grown) outer chain back into that entry.  */
4552 top->first = first_insn;
4553 top->last = last_insn;
4554 /* ??? Why don't we save seq_rtl_expr here? */
4559 /* After emitting to a sequence, restore previous saved state.
4561 To get the contents of the sequence just made, you must call
4562 `gen_sequence' *before* calling here.
4564 If the compiler might have deferred popping arguments while
4565 generating this sequence, and this sequence will not be immediately
4566 inserted into the instruction stream, use do_pending_stack_adjust
4567 before calling gen_sequence. That will ensure that the deferred
4568 pops are inserted into this sequence, and not into some random
4569 location in the instruction stream. See INHIBIT_DEFER_POP for more
4570 information about deferred popping of arguments. */
4575 struct sequence_stack *tem = seq_stack;
/* Pop the saved emission state pushed by start_sequence.  */
4577 first_insn = tem->first;
4578 last_insn = tem->last;
4579 seq_rtl_expr = tem->sequence_rtl_expr;
4580 seq_stack = tem->next;
4585 /* This works like end_sequence, but records the old sequence in FIRST
4589 end_full_sequence (first, last)
/* Hand the just-built chain back through the out-parameters before
   restoring the outer sequence.  */
4592 *first = first_insn;
4597 /* Return 1 if currently emitting into a sequence. */
4602 return seq_stack != 0;
4605 /* Generate a SEQUENCE rtx containing the insns already emitted
4606 to the current sequence.
4608 This is how the gen_... function from a DEFINE_EXPAND
4609 constructs the SEQUENCE that it returns. */
4619 /* Count the insns in the chain. */
4621 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4624 /* If only one insn, return it rather than a SEQUENCE.
4625 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4626 the case of an empty list.)
4627 We only return the pattern of an insn if its code is INSN and it
4628 has no notes. This ensures that no information gets lost. */
4630 && GET_CODE (first_insn) == INSN
4631 && ! RTX_FRAME_RELATED_P (first_insn)
4632 /* Don't throw away any reg notes. */
4633 && REG_NOTES (first_insn) == 0)
4634 return PATTERN (first_insn);
/* Otherwise build a SEQUENCE holding all LEN insns, in order.  */
4636 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4638 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4639 XVECEXP (result, 0, i) = tem;
4644 /* Put the various virtual registers into REGNO_REG_RTX. */
4647 init_virtual_regs (es)
4648 struct emit_status *es;
/* Install the five shared virtual-register rtxes at their fixed
   regno slots in ES's register table.  */
4650 rtx *ptr = es->x_regno_reg_rtx;
4651 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4652 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4653 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4654 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4655 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
/* Reset per-function emit caches.  */
4659 clear_emit_caches ()
4663 /* Clear the start_sequence/gen_sequence cache. */
4664 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4665 sequence_result[i] = 0;
/* File-scope scratch state shared between copy_insn and copy_insn_1;
   copy_insn resets all of it before each top-level copy.  */
4669 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4670 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4671 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4672 static int copy_insn_n_scratches;
4674 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4675 copied an ASM_OPERANDS.
4676 In that case, it is the original input-operand vector. */
4677 static rtvec orig_asm_operands_vector;
4679 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4680 copied an ASM_OPERANDS.
4681 In that case, it is the copied input-operand vector. */
4682 static rtvec copy_asm_operands_vector;
4684 /* Likewise for the constraints vector. */
4685 static rtvec orig_asm_constraints_vector;
4686 static rtvec copy_asm_constraints_vector;
4688 /* Recursively create a new copy of an rtx for copy_insn.
4689 This function differs from copy_rtx in that it handles SCRATCHes and
4690 ASM_OPERANDs properly.
4691 Normally, this function is not used directly; use copy_insn as front end.
4692 However, you could first copy an insn pattern with copy_insn and then use
4693 this function afterwards to properly copy any REG_NOTEs containing
4703 const char *format_ptr;
4705 code = GET_CODE (orig);
/* A SCRATCH already copied during this insn must map to the same
   copy, so consult the scratch translation table first.  */
4722 for (i = 0; i < copy_insn_n_scratches; i++)
4723 if (copy_insn_scratch_in[i] == orig)
4724 return copy_insn_scratch_out[i];
4728 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4729 a LABEL_REF, it isn't sharable. */
4730 if (GET_CODE (XEXP (orig, 0)) == PLUS
4731 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4732 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4736 /* A MEM with a constant address is not sharable. The problem is that
4737 the constant address may need to be reloaded. If the mem is shared,
4738 then reloading one copy of this mem will cause all copies to appear
4739 to have been reloaded. */
4745 copy = rtx_alloc (code);
4747 /* Copy the various flags, and other information. We assume that
4748 all fields need copying, and then clear the fields that should
4749 not be copied. That is the sensible default behavior, and forces
4750 us to explicitly document why we are *not* copying a flag. */
4751 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4753 /* We do not copy the USED flag, which is used as a mark bit during
4754 walks over the RTL. */
4755 RTX_FLAG (copy, used) = 0;
4757 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4758 if (GET_RTX_CLASS (code) == 'i')
4760 RTX_FLAG (copy, jump) = 0;
4761 RTX_FLAG (copy, call) = 0;
4762 RTX_FLAG (copy, frame_related) = 0;
/* Recurse over the operands according to the rtx format string.  */
4765 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4767 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
/* Default shallow copy of the field; expression and vector operands
   are then deep-copied in the switch below.  */
4769 copy->fld[i] = orig->fld[i];
4770 switch (*format_ptr++)
4773 if (XEXP (orig, i) != NULL)
4774 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
/* ASM_OPERANDS vectors already copied for this insn are reused so
   that all references share one copy.  */
4779 if (XVEC (orig, i) == orig_asm_constraints_vector)
4780 XVEC (copy, i) = copy_asm_constraints_vector;
4781 else if (XVEC (orig, i) == orig_asm_operands_vector)
4782 XVEC (copy, i) = copy_asm_operands_vector;
4783 else if (XVEC (orig, i) != NULL)
4785 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4786 for (j = 0; j < XVECLEN (copy, i); j++)
4787 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4798 /* These are left unchanged. */
/* Record a freshly copied SCRATCH in the translation table so later
   references resolve to the same copy.  */
4806 if (code == SCRATCH)
4808 i = copy_insn_n_scratches++;
4809 if (i >= MAX_RECOG_OPERANDS)
4811 copy_insn_scratch_in[i] = orig;
4812 copy_insn_scratch_out[i] = copy;
/* Remember the ASM_OPERANDS vectors so sibling references share them.  */
4814 else if (code == ASM_OPERANDS)
4816 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4817 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4818 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4819 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4825 /* Create a new copy of an rtx.
4826 This function differs from copy_rtx in that it handles SCRATCHes and
4827 ASM_OPERANDs properly.
4828 INSN doesn't really have to be a full INSN; it could be just the
/* Reset the per-copy translation state before recursing, so state
   from a previous copy_insn call cannot leak into this one.  */
4834 copy_insn_n_scratches = 0;
4835 orig_asm_operands_vector = 0;
4836 orig_asm_constraints_vector = 0;
4837 copy_asm_operands_vector = 0;
4838 copy_asm_constraints_vector = 0;
4839 return copy_insn_1 (insn);
4842 /* Initialize data structures and variables in this file
4843 before generating rtl for each function. */
4848 struct function *f = cfun;
4850 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4853 seq_rtl_expr = NULL;
/* Pseudo-register numbers start just past the virtual registers.  */
4855 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4858 first_label_num = label_num;
4862 clear_emit_caches ();
4864 /* Init the tables that describe all the pseudo regs. */
/* Initial capacity: all virtual regs plus headroom for 100 pseudos;
   grown later as pseudos are created.  */
4866 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4868 f->emit->regno_pointer_align
4869 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4870 sizeof (unsigned char));
4873 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4876 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4878 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4879 init_virtual_regs (f->emit);
4881 /* Indicate that the virtual registers and stack locations are
4883 REG_POINTER (stack_pointer_rtx) = 1;
4884 REG_POINTER (frame_pointer_rtx) = 1;
4885 REG_POINTER (hard_frame_pointer_rtx) = 1;
4886 REG_POINTER (arg_pointer_rtx) = 1;
4888 REG_POINTER (virtual_incoming_args_rtx) = 1;
4889 REG_POINTER (virtual_stack_vars_rtx) = 1;
4890 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4891 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4892 REG_POINTER (virtual_cfa_rtx) = 1;
/* Stack-derived pointers are known to be at least STACK_BOUNDARY
   aligned; the CFA only gets word alignment.  */
4894 #ifdef STACK_BOUNDARY
4895 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4896 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4897 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4898 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4900 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4901 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4902 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4903 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4904 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4907 #ifdef INIT_EXPANDERS
4912 /* Mark SS for GC. */
4915 mark_sequence_stack (ss)
4916 struct sequence_stack *ss;
/* Mark the saved chain head and RTL_EXPR of each stack entry.  */
4920 ggc_mark_rtx (ss->first);
4921 ggc_mark_tree (ss->sequence_rtl_expr);
4926 /* Mark ES for GC. */
4929 mark_emit_status (es)
4930 struct emit_status *es;
/* Walk the parallel regno tables (rtx and decl) and mark each entry.  */
4939 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4941 i > 0; --i, ++r, ++t)
4947 mark_sequence_stack (es->sequence_stack);
4948 ggc_mark_tree (es->sequence_rtl_expr);
4949 ggc_mark_rtx (es->x_first_insn);
4952 /* Generate the constant 0. */
4955 gen_const_vector_0 (mode)
4956 enum machine_mode mode;
4961 enum machine_mode inner;
4963 units = GET_MODE_NUNITS (mode);
4964 inner = GET_MODE_INNER (mode);
4966 v = rtvec_alloc (units);
4968 /* This function must only be called after CONST0_RTX for the inner
mode has been set up. */
4969 if (!CONST0_RTX (inner))
/* Fill every element with the inner mode's zero constant.  */
4972 for (i = 0; i < units; ++i)
4973 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4975 tem = gen_rtx_CONST_VECTOR (mode, v);
4979 /* Create some permanent unique rtl objects shared between all functions.
4980 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4983 init_emit_once (line_numbers)
4987 enum machine_mode mode;
4988 enum machine_mode double_mode;
4990 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
4992 const_int_htab = htab_create (37, const_int_htab_hash,
4993 const_int_htab_eq, NULL);
4994 ggc_add_deletable_htab (const_int_htab, 0, 0);
4996 const_double_htab = htab_create (37, const_double_htab_hash,
4997 const_double_htab_eq, NULL);
4998 ggc_add_deletable_htab (const_double_htab, 0, 0);
5000 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5001 mem_attrs_htab_eq, NULL);
5002 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
5004 no_line_numbers = ! line_numbers;
5006 /* Compute the word and byte modes. */
5008 byte_mode = VOIDmode;
5009 word_mode = VOIDmode;
5010 double_mode = VOIDmode;
/* Scan the integer modes from narrowest to widest, latching the first
   mode matching each target width.  */
5012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5013 mode = GET_MODE_WIDER_MODE (mode))
5015 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5016 && byte_mode == VOIDmode)
5019 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5020 && word_mode == VOIDmode)
/* Likewise for the float mode matching DOUBLE_TYPE_SIZE.  */
5024 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5025 mode = GET_MODE_WIDER_MODE (mode))
5027 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5028 && double_mode == VOIDmode)
5032 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5034 /* Assign register numbers to the globally defined register rtx.
5035 This must be done at runtime because the register number field
5036 is in a union and some compilers can't initialize unions. */
5038 pc_rtx = gen_rtx (PC, VOIDmode);
5039 cc0_rtx = gen_rtx (CC0, VOIDmode);
5040 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5041 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
/* On targets where these alias the frame pointer they may already
   have been set; do not overwrite.  */
5042 if (hard_frame_pointer_rtx == 0)
5043 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5044 HARD_FRAME_POINTER_REGNUM);
5045 if (arg_pointer_rtx == 0)
5046 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5047 virtual_incoming_args_rtx =
5048 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5049 virtual_stack_vars_rtx =
5050 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5051 virtual_stack_dynamic_rtx =
5052 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5053 virtual_outgoing_args_rtx =
5054 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5055 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5057 /* These rtx must be roots if GC is enabled. */
5058 ggc_add_rtx_root (global_rtl, GR_MAX);
5060 #ifdef INIT_EXPANDERS
5061 /* This is to initialize {init|mark|free}_machine_status before the first
5062 call to push_function_context_to. This is needed by the Chill front
5063 end which calls push_function_context_to before the first call to
5064 init_function_start. */
5068 /* Create the unique rtx's for certain rtx codes and operand values. */
5070 /* Don't use gen_rtx here since gen_rtx in this case
5071 tries to use these variables. */
5072 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5073 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5074 gen_rtx_raw_CONST_INT (VOIDmode, i);
5075 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
/* Reuse a cached CONST_INT for const_true_rtx when the target's
   STORE_FLAG_VALUE falls inside the cached range.  */
5077 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5078 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5079 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5081 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5083 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5084 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5085 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5086 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
/* Populate const_tiny_rtx[0..2][mode] with 0, 1 and 2 for every
   float, integer and partial-integer mode.  */
5088 for (i = 0; i <= 2; i++)
5090 REAL_VALUE_TYPE *r =
5091 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5093 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5094 mode = GET_MODE_WIDER_MODE (mode))
5095 const_tiny_rtx[i][(int) mode] =
5096 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5098 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5100 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5101 mode = GET_MODE_WIDER_MODE (mode))
5102 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5104 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5106 mode = GET_MODE_WIDER_MODE (mode))
5107 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
/* Vector modes only get a zero constant.  */
5110 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5112 mode = GET_MODE_WIDER_MODE (mode))
5113 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5115 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5117 mode = GET_MODE_WIDER_MODE (mode))
5118 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5120 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5121 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5122 const_tiny_rtx[0][i] = const0_rtx;
5124 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5125 if (STORE_FLAG_VALUE == 1)
5126 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5128 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
5129 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
5130 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
5131 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
5132 ggc_add_rtx_root (&const_true_rtx, 1);
5134 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5135 return_address_pointer_rtx
5136 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
/* Target-configurable rtxes: STRUCT_VALUE / STATIC_CHAIN and their
   incoming variants, each falling back sensibly when undefined.  */
5140 struct_value_rtx = STRUCT_VALUE;
5142 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5145 #ifdef STRUCT_VALUE_INCOMING
5146 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5148 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5149 struct_value_incoming_rtx
5150 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5152 struct_value_incoming_rtx = struct_value_rtx;
5156 #ifdef STATIC_CHAIN_REGNUM
5157 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5159 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5160 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5161 static_chain_incoming_rtx
5162 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5165 static_chain_incoming_rtx = static_chain_rtx;
5169 static_chain_rtx = STATIC_CHAIN;
5171 #ifdef STATIC_CHAIN_INCOMING
5172 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5174 static_chain_incoming_rtx = static_chain_rtx;
5178 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5179 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
/* Register the remaining permanent rtxes as GC roots.  */
5181 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5182 ggc_add_rtx_root (&struct_value_rtx, 1);
5183 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5184 ggc_add_rtx_root (&static_chain_rtx, 1);
5185 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5186 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5189 /* Query and clear/ restore no_line_numbers. This is used by the
5190 switch / case handling in stmt.c to give proper line numbers in
5191 warnings about unreachable code. */
5194 force_line_numbers ()
/* Return the previous setting so restore_line_number_status can
   reinstate it later.  */
5196 int old = no_line_numbers;
5198 no_line_numbers = 0;
5200 force_next_line_note ();
/* Reinstate the no_line_numbers value saved by force_line_numbers.  */
5205 restore_line_number_status (old_value)
5208 no_line_numbers = old_value;
5211 /* Produce exact duplicate of insn INSN after AFTER.
5212 Care updating of libcall regions if present. */
5215 emit_copy_of_insn_after (insn, after)
5219 rtx note1, note2, link;
5221 switch (GET_CODE (insn))
5224 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5228 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5232 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5233 if (CALL_INSN_FUNCTION_USAGE (insn))
5234 CALL_INSN_FUNCTION_USAGE (new)
5235 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5236 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5237 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5244 /* Update LABEL_NUSES. */
5245 mark_jump_label (PATTERN (new), new, 0);
5247 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5249 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5250 if (REG_NOTE_KIND (link) != REG_LABEL)
5252 if (GET_CODE (link) == EXPR_LIST)
5254 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5259 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5264 /* Fix the libcall sequences. */
5265 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5268 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5270 XEXP (note1, 0) = p;
5271 XEXP (note2, 0) = new;